forked from Mirror/ollama4j

Compare commits (27 commits)

| SHA1 |
|---|
| 777ee7ffe0 |
| dcf1d0bdbc |
| 13b7111a42 |
| 09442d37a3 |
| 1e66bdb07f |
| b423090db9 |
| a32d94efbf |
| 31f8302849 |
| 6487756764 |
| abb76ad867 |
| cf4e7a96e8 |
| 0f414f71a3 |
| 2b700fdad8 |
| 06c5daa253 |
| 91aab6cbd1 |
| f38a00ebdc |
| 0f73ea75ab |
| 8fe869afdb |
| 2d274c4f5b |
| 713a3239a4 |
| a9e7958d44 |
| f38e84053f |
| 7eb16b7ba0 |
| 5a3889d8ee |
| e9621f054d |
| b41b62220c |
| c89440cbca |
@@ -41,6 +41,41 @@ You will get a response similar to:
 > require
 > natural language understanding and generation capabilities.
 
+## Try asking a question, receiving the answer streamed
+
+```java
+public class Main {
+
+    public static void main(String[] args) {
+
+        String host = "http://localhost:11434/";
+
+        OllamaAPI ollamaAPI = new OllamaAPI(host);
+        // define a stream handler (Consumer<String>)
+        OllamaStreamHandler streamHandler = (s) -> {
+           System.out.println(s);
+        };
+
+        // Should be called using seperate thread to gain non blocking streaming effect.
+        OllamaResult result = ollamaAPI.generate(config.getModel(),
+          "What is the capital of France? And what's France's connection with Mona Lisa?",
+          new OptionsBuilder().build(), streamHandler);
+
+        System.out.println("Full response: " +result.getResponse());
+    }
+}
+```
+You will get a response similar to:
+
+> The
+> The capital
+> The capital of
+> The capital of France
+> The capital of France is 
+> The capital of France is Paris
+> The capital of France is Paris.
+> Full response: The capital of France is Paris.
+
 ## Try asking a question from general topics.
 
 ```java
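The comment in the added example notes that this call should run on a separate thread so streaming stays non-blocking for the caller. Below is a minimal sketch of that, assuming the classes introduced in this compare (OllamaAPI, OllamaStreamHandler, OllamaResult) and the OptionsBuilder used in the existing docs examples; the import paths are inferred from the package names visible in the diff and the model name "llama2" is only an illustration.

```java
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class StreamingOnWorkerThread {

    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");

        // Each callback receives the concatenated response text so far.
        OllamaStreamHandler streamHandler = System.out::println;

        // Run the blocking generate(...) call off the caller's thread so the
        // caller stays responsive while partial responses stream in.
        ExecutorService executor = Executors.newSingleThreadExecutor();
        Future<OllamaResult> future = executor.submit(() ->
                ollamaAPI.generate("llama2",            // model name: illustrative only
                        "What is the capital of France?",
                        new OptionsBuilder().build(), streamHandler));

        OllamaResult result = future.get();             // wait for completion
        System.out.println("Full response: " + result.getResponse());
        executor.shutdown();
    }
}
```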
@@ -69,6 +69,41 @@ You will get a response similar to:
   } ]
 ```
+
+## Create a conversation where the answer is streamed
+
+```java
+public class Main {
+
+    public static void main(String[] args) {
+
+        String host = "http://localhost:11434/";
+
+        OllamaAPI ollamaAPI = new OllamaAPI(host);
+        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
+        OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER,
+                "What is the capital of France? And what's France's connection with Mona Lisa?")
+            .build();
+
+        // define a handler (Consumer<String>)
+        OllamaStreamHandler streamHandler = (s) -> {
+           System.out.println(s);
+        };
+
+        OllamaChatResult chatResult = ollamaAPI.chat(requestModel,streamHandler);
+    }
+}
+```
+You will get a response similar to:
+
+> The
+> The capital
+> The capital of
+> The capital of France
+> The capital of France is 
+> The capital of France is Paris
+> The capital of France is Paris.
+
+
 ## Create a new conversation with individual system prompt
 ```java
 public class Main {
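As the sample output above shows, each callback receives the whole message accumulated so far rather than just the newest tokens; the Javadoc added to OllamaAPI#chat makes the same caution. A minimal sketch of printing only the newly appended part, assuming the chat classes from this compare; the model name "llama2" is illustrative.

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;

public class ChatStreamDeltas {

    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");

        OllamaChatRequestModel requestModel = OllamaChatRequestBuilder.getInstance("llama2") // illustrative model
                .withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
                .build();

        // The handler is called with the concatenated message so far; remember how
        // much has already been printed and emit only the new suffix each time.
        OllamaStreamHandler deltaHandler = new OllamaStreamHandler() {
            private int printed = 0;

            @Override
            public void accept(String messageSoFar) {
                System.out.print(messageSoFar.substring(printed));
                printed = messageSoFar.length();
            }
        };

        // chatResult carries the final response and the message history.
        OllamaChatResult chatResult = ollamaAPI.chat(requestModel, deltaHandler);
        System.out.println();
    }
}
```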
pom.xml (10 changes)

@@ -4,7 +4,7 @@
 
     <groupId>io.github.amithkoujalgi</groupId>
     <artifactId>ollama4j</artifactId>
-    <version>1.0.52</version>
+    <version>1.0.55</version>
 
     <name>Ollama4j</name>
     <description>Java library for interacting with Ollama API.</description>
@@ -39,7 +39,7 @@
         <connection>scm:git:git@github.com:amithkoujalgi/ollama4j.git</connection>
         <developerConnection>scm:git:https://github.com/amithkoujalgi/ollama4j.git</developerConnection>
         <url>https://github.com/amithkoujalgi/ollama4j</url>
-        <tag>v1.0.52</tag>
+        <tag>v1.0.55</tag>
     </scm>
 
     <build>
@@ -174,6 +174,12 @@
             <version>4.1.0</version>
             <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>org.json</groupId>
+            <artifactId>json</artifactId>
+            <version>20240205</version>
+            <scope>test</scope>
+        </dependency>
     </dependencies>
 
     <distributionManagement>
@@ -6,6 +6,7 @@ import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessage;
 import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
 import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
 import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;
+import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
 import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFileContentsRequest;
 import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFilePathRequest;
 import io.github.amithkoujalgi.ollama4j.core.models.request.ModelEmbeddingsRequest;
@@ -341,13 +342,24 @@ public class OllamaAPI {
    * @param options the Options object - <a
    *     href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More
    *     details on the options</a>
+   * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false.
    * @return OllamaResult that includes response text and time taken for response
    */
+  public OllamaResult generate(String model, String prompt, Options options, OllamaStreamHandler streamHandler)
+      throws OllamaBaseException, IOException, InterruptedException {
+    OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt);
+    ollamaRequestModel.setOptions(options.getOptionsMap());
+    return generateSyncForOllamaRequestModel(ollamaRequestModel,streamHandler);
+  }
+
+  /**
+   * Convenience method to call Ollama API without streaming responses.
+   *
+   * Uses {@link #generate(String, String, Options, OllamaStreamHandler)}
+   */
   public OllamaResult generate(String model, String prompt, Options options)
   throws OllamaBaseException, IOException, InterruptedException {
-    OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt);
-    ollamaRequestModel.setOptions(options.getOptionsMap());
-    return generateSyncForOllamaRequestModel(ollamaRequestModel);
+    return generate(model, prompt, options, null);
   }
 
   /**
@@ -360,7 +372,7 @@ public class OllamaAPI {
    * @return the ollama async result callback handle
    */
   public OllamaAsyncResultCallback generateAsync(String model, String prompt) {
-    OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt);
+    OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt);
 
     URI uri = URI.create(this.host + "/api/generate");
     OllamaAsyncResultCallback ollamaAsyncResultCallback =
@@ -380,20 +392,32 @@ public class OllamaAPI {
    * @param options the Options object - <a
    *     href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More
    *     details on the options</a>
+   * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false.
    * @return OllamaResult that includes response text and time taken for response
    */
   public OllamaResult generateWithImageFiles(
-      String model, String prompt, List<File> imageFiles, Options options)
+      String model, String prompt, List<File> imageFiles, Options options, OllamaStreamHandler streamHandler)
       throws OllamaBaseException, IOException, InterruptedException {
     List<String> images = new ArrayList<>();
     for (File imageFile : imageFiles) {
       images.add(encodeFileToBase64(imageFile));
     }
-    OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt, images);
+    OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images);
     ollamaRequestModel.setOptions(options.getOptionsMap());
-    return generateSyncForOllamaRequestModel(ollamaRequestModel);
+    return generateSyncForOllamaRequestModel(ollamaRequestModel,streamHandler);
   }
+
+  /**
+   * Convenience method to call Ollama API without streaming responses.
+   *
+   * Uses {@link #generateWithImageFiles(String, String, List, Options, OllamaStreamHandler)}
+   */
+  public OllamaResult generateWithImageFiles(
+    String model, String prompt, List<File> imageFiles, Options options)
+    throws OllamaBaseException, IOException, InterruptedException{
+      return generateWithImageFiles(model, prompt, imageFiles, options, null);
+  }
 
   /**
    * With one or more image URLs, ask a question to a model running on Ollama server. This is a
    * sync/blocking call.
@@ -404,18 +428,30 @@ public class OllamaAPI {
    * @param options the Options object - <a
    *     href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More
    *     details on the options</a>
+   * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false.
    * @return OllamaResult that includes response text and time taken for response
    */
   public OllamaResult generateWithImageURLs(
-      String model, String prompt, List<String> imageURLs, Options options)
+      String model, String prompt, List<String> imageURLs, Options options, OllamaStreamHandler streamHandler)
       throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
     List<String> images = new ArrayList<>();
     for (String imageURL : imageURLs) {
       images.add(encodeByteArrayToBase64(Utils.loadImageBytesFromUrl(imageURL)));
     }
-    OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt, images);
+    OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images);
     ollamaRequestModel.setOptions(options.getOptionsMap());
-    return generateSyncForOllamaRequestModel(ollamaRequestModel);
+    return generateSyncForOllamaRequestModel(ollamaRequestModel,streamHandler);
+  }
+
+  /**
+   * Convenience method to call Ollama API without streaming responses.
+   *
+   * Uses {@link #generateWithImageURLs(String, String, List, Options, OllamaStreamHandler)}
+   */
+  public OllamaResult generateWithImageURLs(String model, String prompt, List<String> imageURLs,
+      Options options)
+      throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
+    return generateWithImageURLs(model, prompt, imageURLs, options, null);
   }
 
 
@@ -448,12 +484,31 @@ public class OllamaAPI {
   * @throws InterruptedException in case the server is not reachable or network issues happen
    */
   public OllamaChatResult chat(OllamaChatRequestModel request)  throws OllamaBaseException, IOException, InterruptedException{
-    OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
-    //TODO: implement async way
-    if(request.isStream()){
-      throw new UnsupportedOperationException("Streamed chat responses are not implemented yet");
+    return chat(request,null);
+  }
+
+  /**
+   * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}.
+   *
+   * Hint: the OllamaChatRequestModel#getStream() property is not implemented.
+   *
+   * @param request request object to be sent to the server
+   * @param streamHandler callback handler to handle the last message from stream (caution: all previous messages from stream will be concatenated)
+   * @return
+  * @throws OllamaBaseException any response code than 200 has been returned
+  * @throws IOException in case the responseStream can not be read
+  * @throws InterruptedException in case the server is not reachable or network issues happen
+   */
+  public OllamaChatResult chat(OllamaChatRequestModel request, OllamaStreamHandler streamHandler)  throws OllamaBaseException, IOException, InterruptedException{
+    OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
+    OllamaResult result;
+    if(streamHandler != null){
+      request.setStream(true);
+      result = requestCaller.call(request, streamHandler);
+    }
+    else {
+     result = requestCaller.callSync(request);
     }
-    OllamaResult result = requestCaller.generateSync(request);
     return new OllamaChatResult(result.getResponse(), result.getResponseTime(), result.getHttpStatusCode(), request.getMessages());
   }
 
@@ -467,10 +522,19 @@ public class OllamaAPI {
     return Base64.getEncoder().encodeToString(bytes);
   }
 
-  private OllamaResult generateSyncForOllamaRequestModel(OllamaRequestModel ollamaRequestModel)
+  private OllamaResult generateSyncForOllamaRequestModel(
+      OllamaGenerateRequestModel ollamaRequestModel, OllamaStreamHandler streamHandler)
       throws OllamaBaseException, IOException, InterruptedException {
-        OllamaGenerateEndpointCaller requestCaller = new OllamaGenerateEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
-        return requestCaller.generateSync(ollamaRequestModel);
+    OllamaGenerateEndpointCaller requestCaller =
+        new OllamaGenerateEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
+    OllamaResult result;
+    if (streamHandler != null) {
+      ollamaRequestModel.setStream(true);
+      result = requestCaller.call(ollamaRequestModel, streamHandler);
+    } else {
+      result = requestCaller.callSync(ollamaRequestModel);
+    }
+    return result;
   }
 
   /**
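The new streamHandler overloads above also cover the image endpoints, which have no example in the changed docs. Below is a minimal usage sketch for generateWithImageFiles with streaming, assuming the signature added in this hunk and the OptionsBuilder from the docs examples; the model name "llava" and the image path are illustrative, not taken from the diff.

```java
import java.io.File;
import java.util.List;

import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class StreamingWithImages {

    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");

        // Print each cumulative partial response as it arrives.
        OllamaStreamHandler streamHandler = System.out::println;

        OllamaResult result = ollamaAPI.generateWithImageFiles(
                "llava",                                   // illustrative model name
                "What is in this picture?",
                List.of(new File("/path/to/image.jpg")),   // illustrative path
                new OptionsBuilder().build(),
                streamHandler);

        System.out.println("Full response: " + result.getResponse());

        // Omitting the handler calls the new non-streaming convenience overload,
        // which simply delegates with streamHandler = null.
        OllamaResult blocking = ollamaAPI.generateWithImageFiles(
                "llava", "What is in this picture?",
                List.of(new File("/path/to/image.jpg")),
                new OptionsBuilder().build());
        System.out.println(blocking.getResponse());
    }
}
```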
@@ -0,0 +1,7 @@
+package io.github.amithkoujalgi.ollama4j.core;
+
+import java.util.function.Consumer;
+
+public interface OllamaStreamHandler extends Consumer<String>{
+    void accept(String message);
+}
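Because the new interface re-declares Consumer#accept, it is a functional interface, so a lambda or method reference works as in the docs examples above. A small sketch of a reusable implementation that keeps the latest partial response, assuming the streaming behaviour shown by the observer classes added later in this compare (each call carries the text accumulated so far):

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;

// Keeps the most recent cumulative response so other code can poll it,
// for example to refresh a UI label while generation is still running.
public class LatestResponseHandler implements OllamaStreamHandler {

    private volatile String latest = "";

    @Override
    public void accept(String messageSoFar) {
        latest = messageSoFar;            // each call carries the full text so far
        System.out.println(messageSoFar);
    }

    public String getLatest() {
        return latest;
    }
}
```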
@@ -1,6 +1,8 @@
 package io.github.amithkoujalgi.ollama4j.core.models;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
 import lombok.Data;
 
 @Data
@@ -34,4 +36,13 @@ public class Model {
     return name.split(":")[1];
   }
 
+    @Override
+  public String toString() {
+    try {
+      return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
+    } catch (JsonProcessingException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
 }
@@ -2,7 +2,8 @@ package io.github.amithkoujalgi.ollama4j.core.models;
 
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.Map;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
 import lombok.Data;
 
 @Data
@@ -16,5 +17,14 @@ public class ModelDetail {
   private String parameters;
   private String template;
   private String system;
-  private Map<String, String> details;
+  private ModelMeta details;
+
+    @Override
+  public String toString() {
+    try {
+      return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
+    } catch (JsonProcessingException e) {
+      throw new RuntimeException(e);
+    }
+  }
 }
@@ -2,6 +2,8 @@ package io.github.amithkoujalgi.ollama4j.core.models;
 
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
 import lombok.Data;
 
 @Data
@@ -21,4 +23,13 @@ public class ModelMeta {
 
   @JsonProperty("quantization_level")
   private String quantizationLevel;
+
+    @Override
+  public String toString() {
+    try {
+      return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
+    } catch (JsonProcessingException e) {
+      throw new RuntimeException(e);
+    }
+  }
 }
@@ -1,6 +1,8 @@
 package io.github.amithkoujalgi.ollama4j.core.models;
 
 import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
+import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
+import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel;
 import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
 import java.io.BufferedReader;
 import java.io.IOException;
@@ -22,7 +24,7 @@ import lombok.Getter;
 @SuppressWarnings("unused")
 public class OllamaAsyncResultCallback extends Thread {
   private final HttpRequest.Builder requestBuilder;
-  private final OllamaRequestModel ollamaRequestModel;
+  private final OllamaGenerateRequestModel ollamaRequestModel;
   private final Queue<String> queue = new LinkedList<>();
   private String result;
   private boolean isDone;
@@ -47,7 +49,7 @@ public class OllamaAsyncResultCallback extends Thread {
 
   public OllamaAsyncResultCallback(
       HttpRequest.Builder requestBuilder,
-      OllamaRequestModel ollamaRequestModel,
+      OllamaGenerateRequestModel ollamaRequestModel,
       long requestTimeoutSeconds) {
     this.requestBuilder = requestBuilder;
     this.ollamaRequestModel = ollamaRequestModel;
@@ -87,8 +89,8 @@ public class OllamaAsyncResultCallback extends Thread {
             queue.add(ollamaResponseModel.getError());
             responseBuffer.append(ollamaResponseModel.getError());
           } else {
-            OllamaResponseModel ollamaResponseModel =
-                Utils.getObjectMapper().readValue(line, OllamaResponseModel.class);
+            OllamaGenerateResponseModel ollamaResponseModel =
+                Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
             queue.add(ollamaResponseModel.getResponse());
             if (!ollamaResponseModel.isDone()) {
               responseBuffer.append(ollamaResponseModel.getResponse());
@@ -0,0 +1,35 @@
+package io.github.amithkoujalgi.ollama4j.core.models;
+
+import java.util.Map;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+
+import io.github.amithkoujalgi.ollama4j.core.utils.BooleanToJsonFormatFlagSerializer;
+import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
+import lombok.Data;
+
+@Data
+@JsonInclude(JsonInclude.Include.NON_NULL)
+public abstract class OllamaCommonRequestModel {
+
+  protected String model;
+  @JsonSerialize(using = BooleanToJsonFormatFlagSerializer.class)
+  @JsonProperty(value = "format")
+  protected Boolean returnFormatJson;
+  protected Map<String, Object> options;
+  protected String template;
+  protected boolean stream;
+  @JsonProperty(value = "keep_alive")
+  protected String keepAlive;
+
+  public String toString() {
+    try {
+      return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
+    } catch (JsonProcessingException e) {
+      throw new RuntimeException(e);
+    }
+  }
+}
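The new base class above centralises the shared request fields and their JSON handling: @JsonInclude(Include.NON_NULL) drops unset object fields from the serialised body, the custom serializer maps the returnFormatJson flag onto the "format" property, and toString() pretty-prints the request through the project's shared ObjectMapper. A tiny sketch, assuming the OllamaGenerateRequestModel subclass added later in this compare; the exact printed JSON shape is indicative only, not taken from the diff.

```java
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;

public class RequestSerializationSketch {

    public static void main(String[] args) {
        // Illustrative model name and prompt.
        OllamaGenerateRequestModel request =
                new OllamaGenerateRequestModel("llama2", "Why is the sky blue?");

        // toString() delegates to Utils.getObjectMapper() in the base class, so this
        // prints roughly the JSON body that the request serialises to. Fields left
        // null (options, template, keep_alive, ...) are omitted via NON_NULL.
        System.out.println(request);
    }
}
```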
@@ -1,39 +0,0 @@
-package io.github.amithkoujalgi.ollama4j.core.models;
-
-import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
-
-import com.fasterxml.jackson.core.JsonProcessingException;
-
-import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
-
-import java.util.List;
-import java.util.Map;
-import lombok.Data;
-
-@Data
-public class OllamaRequestModel implements OllamaRequestBody{
-
-  private String model;
-  private String prompt;
-  private List<String> images;
-  private Map<String, Object> options;
-
-  public OllamaRequestModel(String model, String prompt) {
-    this.model = model;
-    this.prompt = prompt;
-  }
-
-  public OllamaRequestModel(String model, String prompt, List<String> images) {
-    this.model = model;
-    this.prompt = prompt;
-    this.images = images;
-  }
-
-  public String toString() {
-    try {
-      return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
-    } catch (JsonProcessingException e) {
-      throw new RuntimeException(e);
-    }
-  }
-}
@@ -83,12 +83,12 @@ public class OllamaChatRequestBuilder {
     }
 
     public OllamaChatRequestBuilder withOptions(Options options){
-        this.request.setOptions(options);
+        this.request.setOptions(options.getOptionsMap());
         return this;
     }
 
-    public OllamaChatRequestBuilder withFormat(String format){
-        this.request.setFormat(format);
+    public OllamaChatRequestBuilder withGetJsonResponse(){
+        this.request.setReturnFormatJson(true);
         return this;
     }
 
@@ -1,47 +1,39 @@
 package io.github.amithkoujalgi.ollama4j.core.models.chat;
 
 import java.util.List;
+import io.github.amithkoujalgi.ollama4j.core.models.OllamaCommonRequestModel;
-import com.fasterxml.jackson.core.JsonProcessingException;
-
 import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
-import io.github.amithkoujalgi.ollama4j.core.utils.Options;
 
-import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
-
-import lombok.AllArgsConstructor;
-import lombok.Data;
-import lombok.NonNull;
-import lombok.RequiredArgsConstructor;
+import lombok.Getter;
+import lombok.Setter;
 
 /**
  * Defines a Request to use against the ollama /api/chat endpoint.
  *
- * @see <a
- *     href="https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion">Generate
+ * @see <a href=
+ *      "https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion">Generate
  *      Chat Completion</a>
  */
-@Data
-@AllArgsConstructor
-@RequiredArgsConstructor
-public class OllamaChatRequestModel implements OllamaRequestBody {
+@Getter
+@Setter
+public class OllamaChatRequestModel extends OllamaCommonRequestModel implements OllamaRequestBody {
 
-  @NonNull private String model;
-  @NonNull private List<OllamaChatMessage> messages;
-  private String format;
-  private Options options;
-  private String template;
-  private boolean stream;
-  private String keepAlive;
+  private List<OllamaChatMessage> messages;
+
+  public OllamaChatRequestModel() {}
+
+  public OllamaChatRequestModel(String model, List<OllamaChatMessage> messages) {
+    this.model = model;
+    this.messages = messages;
+  }
 
   @Override
-  public String toString() {
-    try {
-      return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
-    } catch (JsonProcessingException e) {
-      throw new RuntimeException(e);
+  public boolean equals(Object o) {
+    if (!(o instanceof OllamaChatRequestModel)) {
+      return false;
     }
+
+    return this.toString().equals(o.toString());
   }
 
 }
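With the builder change above (withFormat(String) replaced by withGetJsonResponse(), and withOptions now storing the options map) and the chat request model now extending OllamaCommonRequestModel, a request that asks for JSON output is built roughly as follows. Only the builder calls shown come from this compare; withMessage and OptionsBuilder usage are taken from the existing docs examples, and the model name is illustrative.

```java
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class ChatRequestBuilding {

    public static void main(String[] args) {
        OllamaChatRequestModel request = OllamaChatRequestBuilder.getInstance("llama2") // illustrative model
                .withMessage(OllamaChatMessageRole.USER,
                        "List the three largest cities in France as JSON.")
                // formerly withFormat("json"); now a flag on the common request model
                .withGetJsonResponse()
                // withOptions now copies the option map into the request
                .withOptions(new OptionsBuilder().build())
                .build();

        // The inherited toString() pretty-prints the request as JSON.
        System.out.println(request);
    }
}
```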
@@ -0,0 +1,31 @@
+package io.github.amithkoujalgi.ollama4j.core.models.chat;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
+
+public class OllamaChatStreamObserver {
+
+    private OllamaStreamHandler streamHandler;
+
+    private List<OllamaChatResponseModel> responseParts = new ArrayList<>();
+
+    private String message = "";
+
+    public OllamaChatStreamObserver(OllamaStreamHandler streamHandler) {
+        this.streamHandler = streamHandler;
+    }
+
+    public void notify(OllamaChatResponseModel currentResponsePart){
+        responseParts.add(currentResponsePart);
+        handleCurrentResponsePart(currentResponsePart);
+    }
+
+    protected void handleCurrentResponsePart(OllamaChatResponseModel currentResponsePart){
+        message = message + currentResponsePart.getMessage().getContent();
+        streamHandler.accept(message);
+    }
+
+}
@@ -0,0 +1,55 @@
+package io.github.amithkoujalgi.ollama4j.core.models.generate;
+
+import io.github.amithkoujalgi.ollama4j.core.utils.Options;
+
+/**
+ * Helper class for creating {@link io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel}
+ * objects using the builder-pattern.
+ */
+public class OllamaGenerateRequestBuilder {
+
+    private OllamaGenerateRequestBuilder(String model, String prompt){
+        request = new OllamaGenerateRequestModel(model, prompt);
+    }
+
+    private OllamaGenerateRequestModel request;
+
+    public static OllamaGenerateRequestBuilder getInstance(String model){
+        return new OllamaGenerateRequestBuilder(model,"");
+    }
+
+    public OllamaGenerateRequestModel build(){
+        return request;
+    }
+
+    public OllamaGenerateRequestBuilder withPrompt(String prompt){
+        request.setPrompt(prompt);
+        return this;
+    }
+
+    public OllamaGenerateRequestBuilder withGetJsonResponse(){
+        this.request.setReturnFormatJson(true);
+        return this;
+    }
+
+    public OllamaGenerateRequestBuilder withOptions(Options options){
+        this.request.setOptions(options.getOptionsMap());
+        return this;
+    }
+
+    public OllamaGenerateRequestBuilder withTemplate(String template){
+        this.request.setTemplate(template);
+        return this;
+    }
+
+    public OllamaGenerateRequestBuilder withStreaming(){
+        this.request.setStream(true);
+        return this;
+    }
+
+    public OllamaGenerateRequestBuilder withKeepAlive(String keepAlive){
+        this.request.setKeepAlive(keepAlive);
+        return this;
+    }
+
+}
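A short usage sketch for the new generate-request builder above. Only the builder calls come from this compare; the model name, prompt, and keep_alive value are illustrative, and no public OllamaAPI method that accepts the built request is shown in this diff, so the sketch only builds and prints the request.

```java
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class GenerateRequestBuilding {

    public static void main(String[] args) {
        OllamaGenerateRequestModel request = OllamaGenerateRequestBuilder.getInstance("llama2") // illustrative model
                .withPrompt("Why is the sky blue?")
                .withOptions(new OptionsBuilder().build())
                .withStreaming()          // sets stream = true on the common request model
                .withKeepAlive("5m")      // keep_alive value: illustrative
                .build();

        // Inherited toString() shows the JSON the request serialises to.
        System.out.println(request);
    }
}
```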
@@ -0,0 +1,46 @@
+package io.github.amithkoujalgi.ollama4j.core.models.generate;
+
+import io.github.amithkoujalgi.ollama4j.core.models.OllamaCommonRequestModel;
+import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
+
+import java.util.List;
+
+import lombok.Getter;
+import lombok.Setter;
+
+@Getter
+@Setter
+public class OllamaGenerateRequestModel extends OllamaCommonRequestModel implements OllamaRequestBody{
+
+  private String prompt;
+  private List<String> images;
+
+  private String system;
+  private String context;
+  private boolean raw;
+
+  public OllamaGenerateRequestModel() {
+  }
+
+  public OllamaGenerateRequestModel(String model, String prompt) {
+    this.model = model;
+    this.prompt = prompt;
+  }
+
+  public OllamaGenerateRequestModel(String model, String prompt, List<String> images) {
+    this.model = model;
+    this.prompt = prompt;
+    this.images = images;
+  }
+
+    @Override
+  public boolean equals(Object o) {
+    if (!(o instanceof OllamaGenerateRequestModel)) {
+      return false;
+    }
+
+    return this.toString().equals(o.toString());
+  }
+
+}
@@ -1,4 +1,4 @@
-package io.github.amithkoujalgi.ollama4j.core.models;
+package io.github.amithkoujalgi.ollama4j.core.models.generate;
 
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import com.fasterxml.jackson.annotation.JsonProperty;
@@ -8,7 +8,7 @@ import lombok.Data;
 
 @Data
 @JsonIgnoreProperties(ignoreUnknown = true)
-public class OllamaResponseModel {
+public class OllamaGenerateResponseModel {
     private String model;
     private @JsonProperty("created_at") String createdAt;
     private String response;
@@ -0,0 +1,31 @@
+package io.github.amithkoujalgi.ollama4j.core.models.generate;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
+
+public class OllamaGenerateStreamObserver {
+
+    private OllamaStreamHandler streamHandler;
+
+    private List<OllamaGenerateResponseModel> responseParts = new ArrayList<>();
+
+    private String message = "";
+
+    public OllamaGenerateStreamObserver(OllamaStreamHandler streamHandler) {
+        this.streamHandler = streamHandler;
+    }
+
+    public void notify(OllamaGenerateResponseModel currentResponsePart){
+        responseParts.add(currentResponsePart);
+        handleCurrentResponsePart(currentResponsePart);
+    }
+
+    protected void handleCurrentResponsePart(OllamaGenerateResponseModel currentResponsePart){
+        message = message + currentResponsePart.getResponse();
+        streamHandler.accept(message);
+    }
+
+}
@@ -1,12 +1,19 @@
 package io.github.amithkoujalgi.ollama4j.core.models.request;
 
+import java.io.IOException;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.fasterxml.jackson.core.JsonProcessingException;
 
+import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
+import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
 import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
+import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
 import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResponseModel;
+import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatStreamObserver;
+import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
 import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
 
 /**
@@ -16,6 +23,8 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller{
 
     private static final Logger LOG = LoggerFactory.getLogger(OllamaChatEndpointCaller.class);
 
+    private OllamaChatStreamObserver streamObserver;
+
     public OllamaChatEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
         super(host, basicAuth, requestTimeoutSeconds, verbose);
     }
@@ -30,6 +39,9 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller{
         try {
             OllamaChatResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class);
             responseBuffer.append(ollamaResponseModel.getMessage().getContent());
+            if(streamObserver != null) {
+                streamObserver.notify(ollamaResponseModel);
+            }
            return ollamaResponseModel.isDone();
         } catch (JsonProcessingException e) {
             LOG.error("Error parsing the Ollama chat response!",e);
@@ -37,7 +49,11 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller{
         }
     }
 
+    public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler)
+            throws OllamaBaseException, IOException, InterruptedException {
+        streamObserver = new OllamaChatStreamObserver(streamHandler);
+        return super.callSync(body);
+    }
+
 
@@ -56,7 +56,7 @@ public abstract class OllamaEndpointCaller {
      * @throws IOException in case the responseStream can not be read
      * @throws InterruptedException in case the server is not reachable or network issues happen
      */
-    public OllamaResult generateSync(OllamaRequestBody body)  throws OllamaBaseException, IOException, InterruptedException{
+    public OllamaResult callSync(OllamaRequestBody body)  throws OllamaBaseException, IOException, InterruptedException{
 
         // Create Request
     long startTime = System.currentTimeMillis();
@@ -1,18 +1,25 @@
 package io.github.amithkoujalgi.ollama4j.core.models.request;
 
+import java.io.IOException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.fasterxml.jackson.core.JsonProcessingException;
+import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
+import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
 import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
-import io.github.amithkoujalgi.ollama4j.core.models.OllamaResponseModel;
+import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
+import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel;
+import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateStreamObserver;
+import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
 import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
 
 public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller{
 
     private static final Logger LOG = LoggerFactory.getLogger(OllamaGenerateEndpointCaller.class);
 
+    private OllamaGenerateStreamObserver streamObserver;
+
     public OllamaGenerateEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
         super(host, basicAuth, requestTimeoutSeconds, verbose);   
     }
@@ -25,8 +32,11 @@ public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller{
     @Override
     protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) {
                 try {
-                    OllamaResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaResponseModel.class);
+                    OllamaGenerateResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
                     responseBuffer.append(ollamaResponseModel.getResponse());
+                    if(streamObserver != null) {
+                        streamObserver.notify(ollamaResponseModel);
+                    }
                     return ollamaResponseModel.isDone();
                 } catch (JsonProcessingException e) {
                     LOG.error("Error parsing the Ollama chat response!",e);
@@ -34,7 +44,11 @@ public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller{
                 }         
     }
 
-    
+    public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler)
+        throws OllamaBaseException, IOException, InterruptedException {
+    streamObserver = new OllamaGenerateStreamObserver(streamHandler);
+    return super.callSync(body);
+    }
     
     
 }
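For orientation, here is a minimal, hypothetical driver for the new streaming overload on the generate endpoint caller. It is a sketch, not the library's documented usage: it assumes OllamaGenerateRequestModel implements OllamaRequestBody, that passing null for BasicAuth is acceptable when no authentication is configured, and it uses a made-up model name. The constructor and method signatures follow what the diff above shows.

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.request.OllamaGenerateEndpointCaller;

public class GenerateStreamingSketch {

    public static void main(String[] args) throws Exception {
        // Build a generate request; builder usage mirrors the serialization tests further down.
        OllamaGenerateRequestModel request = OllamaGenerateRequestBuilder.getInstance("llama2") // model name is illustrative
                .withPrompt("Why is the sky blue?").build();

        // The handler receives the full text accumulated so far on every streamed part.
        OllamaStreamHandler handler = (s) -> System.out.println(s);

        // Constructor signature as shown in the diff: host, basicAuth, requestTimeoutSeconds, verbose.
        // Assumption: null BasicAuth when no authentication is needed.
        OllamaGenerateEndpointCaller caller =
                new OllamaGenerateEndpointCaller("http://localhost:11434", null, 60, true);

        // The new overload registers a stream observer, then delegates to the renamed callSync(body).
        OllamaResult result = caller.call(request, handler);
        System.out.println("Full response: " + result.getResponse());
    }
}
```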
@@ -0,0 +1,21 @@
+package io.github.amithkoujalgi.ollama4j.core.utils;
+
+import java.io.IOException;
+
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.databind.JsonSerializer;
+import com.fasterxml.jackson.databind.SerializerProvider;
+
+public class BooleanToJsonFormatFlagSerializer extends JsonSerializer<Boolean>{
+
+    @Override
+    public void serialize(Boolean value, JsonGenerator gen, SerializerProvider serializers) throws IOException {
+            gen.writeString("json");
+    }
+
+    @Override
+    public boolean isEmpty(SerializerProvider provider,Boolean value){
+        return !value;
+    }
+
+}
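The serializer above always writes the literal string "json", and its isEmpty override reports false values as empty so Jackson can drop the property altogether when a non-empty inclusion rule is in effect. Below is a minimal sketch of how such a serializer is typically attached to a boolean flag; the DemoRequest class and its returnFormatJson field are invented for illustration, while the real request models in this diff wire the serializer up on their own fields.

```java
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;

import io.github.amithkoujalgi.ollama4j.core.utils.BooleanToJsonFormatFlagSerializer;

public class ReturnFormatDemo {

    // Hypothetical request-like class, only for demonstrating the serializer.
    public static class DemoRequest {
        @JsonProperty("format")
        @JsonSerialize(using = BooleanToJsonFormatFlagSerializer.class)
        @JsonInclude(JsonInclude.Include.NON_EMPTY) // isEmpty() returns true for false => property omitted
        public Boolean returnFormatJson;
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();

        DemoRequest withJson = new DemoRequest();
        withJson.returnFormatJson = true;
        System.out.println(mapper.writeValueAsString(withJson));    // {"format":"json"}

        DemoRequest withoutJson = new DemoRequest();
        withoutJson.returnFormatJson = false;
        System.out.println(mapper.writeValueAsString(withoutJson)); // {} -- flag suppressed via isEmpty()
    }
}
```

This matches the behaviour exercised by the testWithJsonFormat cases in the serialization tests later in this diff, which assert that the serialized request carries "format": "json".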
@@ -1,8 +1,6 @@
 package io.github.amithkoujalgi.ollama4j.core.utils;
 
-import java.io.ByteArrayOutputStream;
 import java.io.IOException;
-import java.io.ObjectOutputStream;
 import java.util.Base64;
 import java.util.Collection;
 
@@ -20,11 +18,4 @@ public class FileToBase64Serializer extends JsonSerializer<Collection<byte[]>> {
         }
         jsonGenerator.writeEndArray();
     }
-
-    public static byte[] serialize(Object obj) throws IOException {
-        ByteArrayOutputStream out = new ByteArrayOutputStream();
-        ObjectOutputStream os = new ObjectOutputStream(out);
-        os.writeObject(obj);
-        return out.toByteArray();
-    }
 }
@@ -4,6 +4,7 @@ import static org.junit.jupiter.api.Assertions.*;
 
 import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
 import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
+import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail;
 import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
 import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
 import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
@@ -23,8 +24,13 @@ import lombok.Data;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Order;
 import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 class TestRealAPIs {
 
+  private static final Logger LOG = LoggerFactory.getLogger(TestRealAPIs.class);
+
   OllamaAPI ollamaAPI;
   Config config;
 
@@ -86,6 +92,19 @@ class TestRealAPIs {
     }
   }
 
+  @Test
+  @Order(3)
+  void testListDtails() {
+    testEndpointReachability();
+    try {
+      ModelDetail modelDetails = ollamaAPI.getModelDetails(config.getModel());
+      assertNotNull(modelDetails);
+      System.out.println(modelDetails);
+    } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
   @Test
   @Order(3)
   void testAskModelWithDefaultOptions() {
@@ -104,6 +123,32 @@ class TestRealAPIs {
     }
   }
 
+  @Test
+  @Order(3)
+  void testAskModelWithDefaultOptionsStreamed() {
+    testEndpointReachability();
+    try {
+
+      StringBuffer sb = new StringBuffer("");
+
+      OllamaResult result = ollamaAPI.generate(config.getModel(),
+          "What is the capital of France? And what's France's connection with Mona Lisa?",
+          new OptionsBuilder().build(), (s) -> {
+            LOG.info(s);
+            String substring = s.substring(sb.toString().length(), s.length());
+            LOG.info(substring);
+            sb.append(substring);
+          });
+
+      assertNotNull(result);
+      assertNotNull(result.getResponse());
+      assertFalse(result.getResponse().isEmpty());
+      assertEquals(sb.toString().trim(), result.getResponse().trim());
+    } catch (IOException | OllamaBaseException | InterruptedException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
   @Test
   @Order(3)
   void testAskModelWithOptions() {
@@ -164,6 +209,31 @@ class TestRealAPIs {
     }
   }
 
+  @Test
+  @Order(3)
+  void testChatWithStream() {
+    testEndpointReachability();
+    try {
+      OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
+      OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER,
+              "What is the capital of France? And what's France's connection with Mona Lisa?")
+          .build();
+
+      StringBuffer sb = new StringBuffer("");
+
+      OllamaChatResult chatResult = ollamaAPI.chat(requestModel,(s) -> {
+        LOG.info(s);
+        String substring = s.substring(sb.toString().length(), s.length());
+        LOG.info(substring);
+        sb.append(substring);
+      });
+      assertNotNull(chatResult);
+      assertEquals(sb.toString().trim(), chatResult.getResponse().trim());
+    } catch (IOException | OllamaBaseException | InterruptedException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
   @Test
   @Order(3)
   void testChatWithImageFromFileWithHistoryRecognition() {
@@ -232,6 +302,30 @@ class TestRealAPIs {
     }
   }
 
+  @Test
+  @Order(3)
+  void testAskModelWithOptionsAndImageFilesStreamed() {
+    testEndpointReachability();
+    File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg");
+    try {
+      StringBuffer sb = new StringBuffer("");
+
+      OllamaResult result = ollamaAPI.generateWithImageFiles(config.getImageModel(),
+          "What is in this image?", List.of(imageFile), new OptionsBuilder().build(), (s) -> {
+            LOG.info(s);
+            String substring = s.substring(sb.toString().length(), s.length());
+            LOG.info(substring);
+            sb.append(substring);
+          });
+      assertNotNull(result);
+      assertNotNull(result.getResponse());
+      assertFalse(result.getResponse().isEmpty());
+      assertEquals(sb.toString().trim(), result.getResponse().trim());
+    } catch (IOException | OllamaBaseException | InterruptedException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
   @Test
   @Order(3)
   void testAskModelWithOptionsAndImageURLs() {
@@ -0,0 +1,106 @@
+package io.github.amithkoujalgi.ollama4j.unittests.jackson;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.fail;
+
+import java.io.File;
+import java.util.List;
+
+import org.json.JSONObject;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
+import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
+import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
+import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
+import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
+
+public class TestChatRequestSerialization {
+
+    private OllamaChatRequestBuilder builder;
+
+    private ObjectMapper mapper = Utils.getObjectMapper();
+
+    @BeforeEach
+    public void init() {
+        builder = OllamaChatRequestBuilder.getInstance("DummyModel");
+    }
+
+    @Test
+    public void testRequestOnlyMandatoryFields() {
+        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt",
+                List.of(new File("src/test/resources/dog-on-a-boat.jpg"))).build();
+        String jsonRequest = serializeRequest(req);
+        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest), req);
+    }
+
+    @Test
+    public void testRequestMultipleMessages() {
+        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.SYSTEM, "System prompt")
+        .withMessage(OllamaChatMessageRole.USER, "Some prompt")
+        .build();
+        String jsonRequest = serializeRequest(req);
+        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest), req);
+    }
+
+    @Test
+    public void testRequestWithMessageAndImage() {
+        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt",
+                List.of(new File("src/test/resources/dog-on-a-boat.jpg"))).build();
+        String jsonRequest = serializeRequest(req);
+        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest), req);
+    }
+
+    @Test
+    public void testRequestWithOptions() {
+        OptionsBuilder b = new OptionsBuilder();
+        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
+                .withOptions(b.setMirostat(1).build()).build();
+
+        String jsonRequest = serializeRequest(req);
+        OllamaChatRequestModel deserializeRequest = deserializeRequest(jsonRequest);
+        assertEqualsAfterUnmarshalling(deserializeRequest, req);
+        assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
+    }
+
+    @Test
+    public void testWithJsonFormat() {
+        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
+                .withGetJsonResponse().build();
+
+        String jsonRequest = serializeRequest(req);
+        // no jackson deserialization as format property is not boolean ==> omit as deserialization
+        // of request is never used in real code anyways
+        JSONObject jsonObject = new JSONObject(jsonRequest);
+        String requestFormatProperty = jsonObject.getString("format");
+        assertEquals("json", requestFormatProperty);
+    }
+
+    private String serializeRequest(OllamaChatRequestModel req) {
+        try {
+            return mapper.writeValueAsString(req);
+        } catch (JsonProcessingException e) {
+            fail("Could not serialize request!", e);
+            return null;
+        }
+    }
+
+    private OllamaChatRequestModel deserializeRequest(String jsonRequest) {
+        try {
+            return mapper.readValue(jsonRequest, OllamaChatRequestModel.class);
+        } catch (JsonProcessingException e) {
+            fail("Could not deserialize jsonRequest!", e);
+            return null;
+        }
+    }
+
+    private void assertEqualsAfterUnmarshalling(OllamaChatRequestModel unmarshalledRequest,
+            OllamaChatRequestModel req) {
+        assertEquals(req, unmarshalledRequest);
+    }
+
+}
@@ -0,0 +1,85 @@
+package io.github.amithkoujalgi.ollama4j.unittests.jackson;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.fail;
+
+import org.json.JSONObject;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestBuilder;
+import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
+import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
+import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
+
+public class TestGenerateRequestSerialization {
+
+    private OllamaGenerateRequestBuilder builder;
+
+    private ObjectMapper mapper = Utils.getObjectMapper();
+
+    @BeforeEach
+    public void init() {
+        builder = OllamaGenerateRequestBuilder.getInstance("DummyModel");
+    }
+
+    @Test
+    public void testRequestOnlyMandatoryFields() {
+        OllamaGenerateRequestModel req = builder.withPrompt("Some prompt").build();
+
+        String jsonRequest = serializeRequest(req);
+        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest), req);
+    }
+
+    @Test
+    public void testRequestWithOptions() {
+        OptionsBuilder b = new OptionsBuilder();
+        OllamaGenerateRequestModel req =
+                builder.withPrompt("Some prompt").withOptions(b.setMirostat(1).build()).build();
+
+        String jsonRequest = serializeRequest(req);
+        OllamaGenerateRequestModel deserializeRequest = deserializeRequest(jsonRequest);
+        assertEqualsAfterUnmarshalling(deserializeRequest, req);
+        assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
+    }
+
+    @Test
+    public void testWithJsonFormat() {
+        OllamaGenerateRequestModel req =
+                builder.withPrompt("Some prompt").withGetJsonResponse().build();
+
+        String jsonRequest = serializeRequest(req);
+        // no jackson deserialization as format property is not boolean ==> omit as deserialization
+        // of request is never used in real code anyways
+        JSONObject jsonObject = new JSONObject(jsonRequest);
+        String requestFormatProperty = jsonObject.getString("format");
+        assertEquals("json", requestFormatProperty);
+    }
+
+    private String serializeRequest(OllamaGenerateRequestModel req) {
+        try {
+            return mapper.writeValueAsString(req);
+        } catch (JsonProcessingException e) {
+            fail("Could not serialize request!", e);
+            return null;
+        }
+    }
+
+    private OllamaGenerateRequestModel deserializeRequest(String jsonRequest) {
+        try {
+            return mapper.readValue(jsonRequest, OllamaGenerateRequestModel.class);
+        } catch (JsonProcessingException e) {
+            fail("Could not deserialize jsonRequest!", e);
+            return null;
+        }
+    }
+
+    private void assertEqualsAfterUnmarshalling(OllamaGenerateRequestModel unmarshalledRequest,
+            OllamaGenerateRequestModel req) {
+        assertEquals(req, unmarshalledRequest);
+    }
+
+}