Mirror of https://github.com/amithkoujalgi/ollama4j.git
Synced 2025-11-04 02:20:50 +01:00

Compare commits

16 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | 9c46b510d8 |  |
|  | 9d887b60a8 |  |
|  | 63d4de4e24 |  |
|  | a10692e2f1 |  |
|  | b0c152a42e |  |
|  | f44767e023 |  |
|  | aadef0a57c |  |
|  | 777ee7ffe0 |  |
|  | dcf1d0bdbc |  |
|  | 13b7111a42 |  |
|  | 09442d37a3 |  |
|  | 1e66bdb07f |  |
|  | b423090db9 |  |
|  | a32d94efbf |  |
|  | abb76ad867 |  |
|  | cf4e7a96e8 |  |

README.md (12 changed lines)
@@ -67,7 +67,7 @@ In your Maven project, add this dependency:
<dependency>
    <groupId>io.github.amithkoujalgi</groupId>
    <artifactId>ollama4j</artifactId>
    <version>1.0.47</version>
    <version>1.0.57</version>
</dependency>
```

@@ -125,15 +125,15 @@ Actions CI workflow.
- [x] Update request body creation with Java objects
- [ ] Async APIs for images
- [ ] Add custom headers to requests
- [ ] Add additional params for `ask` APIs such as:
- [x] Add additional params for `ask` APIs such as:
    - [x] `options`: additional model parameters for the Modelfile such as `temperature` -
      Supported [params](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values).
    - [ ] `system`: system prompt to (overrides what is defined in the Modelfile)
    - [ ] `template`: the full prompt or prompt template (overrides what is defined in the Modelfile)
    - [ ] `context`: the context parameter returned from a previous request, which can be used to keep a
    - [x] `system`: system prompt to (overrides what is defined in the Modelfile)
    - [x] `template`: the full prompt or prompt template (overrides what is defined in the Modelfile)
    - [x] `context`: the context parameter returned from a previous request, which can be used to keep a
      short
      conversational memory
    - [ ] `stream`: Add support for streaming responses from the model
    - [x] `stream`: Add support for streaming responses from the model
- [ ] Add test cases
- [ ] Handle exceptions better (maybe throw more appropriate exceptions)
@@ -1,6 +1,6 @@
{
  "label": "APIs - Extras",
  "position": 10,
  "position": 4,
  "link": {
    "type": "generated-index",
    "description": "Details of APIs to handle bunch of extra stuff."

@@ -1,6 +1,6 @@
{
  "label": "APIs - Ask",
  "position": 10,
  "label": "APIs - Generate",
  "position": 3,
  "link": {
    "type": "generated-index",
    "description": "Details of APIs to interact with LLMs."
@@ -41,6 +41,41 @@ You will get a response similar to:
> require
> natural language understanding and generation capabilities.

## Try asking a question, receiving the answer streamed

```java
public class Main {

    public static void main(String[] args) {

        String host = "http://localhost:11434/";

        OllamaAPI ollamaAPI = new OllamaAPI(host);
        // define a stream handler (Consumer<String>)
        OllamaStreamHandler streamHandler = (s) -> {
           System.out.println(s);
        };

        // Should be called using seperate thread to gain non blocking streaming effect.
        OllamaResult result = ollamaAPI.generate(config.getModel(),
          "What is the capital of France? And what's France's connection with Mona Lisa?",
          new OptionsBuilder().build(), streamHandler);

        System.out.println("Full response: " +result.getResponse());
    }
}
```
You will get a response similar to:

> The
> The capital
> The capital of
> The capital of France
> The capital of France is 
> The capital of France is Paris
> The capital of France is Paris.
> Full response: The capital of France is Paris.

## Try asking a question from general topics.

```java
@@ -1,6 +1,6 @@
{
  "label": "APIs - Model Management",
  "position": 4,
  "position": 2,
  "link": {
    "type": "generated-index",
    "description": "Details of APIs to manage LLMs."

pom.xml (6 changed lines)
@@ -4,7 +4,7 @@

    <groupId>io.github.amithkoujalgi</groupId>
    <artifactId>ollama4j</artifactId>
    <version>1.0.54</version>
    <version>1.0.57</version>

    <name>Ollama4j</name>
    <description>Java library for interacting with Ollama API.</description>

@@ -39,7 +39,7 @@
        <connection>scm:git:git@github.com:amithkoujalgi/ollama4j.git</connection>
        <developerConnection>scm:git:https://github.com/amithkoujalgi/ollama4j.git</developerConnection>
        <url>https://github.com/amithkoujalgi/ollama4j</url>
        <tag>v1.0.54</tag>
        <tag>v1.0.57</tag>
    </scm>

    <build>

@@ -99,7 +99,7 @@
                <configuration>
                    <skipTests>${skipUnitTests}</skipTests>
                    <includes>
                        <include>**/unittests/*.java</include>
                        <include>**/unittests/**/*.java</include>
                    </includes>
                </configuration>
            </plugin>
@@ -6,10 +6,11 @@ import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessage;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFileContentsRequest;
import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFilePathRequest;
import io.github.amithkoujalgi.ollama4j.core.models.request.ModelEmbeddingsRequest;
import io.github.amithkoujalgi.ollama4j.core.models.request.ModelRequest;
import io.github.amithkoujalgi.ollama4j.core.models.request.OllamaChatEndpointCaller;
import io.github.amithkoujalgi.ollama4j.core.models.request.OllamaGenerateEndpointCaller;
@@ -313,8 +314,18 @@ public class OllamaAPI {
   */
  public List<Double> generateEmbeddings(String model, String prompt)
      throws IOException, InterruptedException, OllamaBaseException {
        return generateEmbeddings(new OllamaEmbeddingsRequestModel(model, prompt));
  }

    /**
   * Generate embeddings using a {@link OllamaEmbeddingsRequestModel}.
   *
   * @param modelRequest request for '/api/embeddings' endpoint
   * @return embeddings
   */
  public List<Double> generateEmbeddings(OllamaEmbeddingsRequestModel modelRequest) throws IOException, InterruptedException, OllamaBaseException{
    URI uri = URI.create(this.host + "/api/embeddings");
    String jsonData = new ModelEmbeddingsRequest(model, prompt).toString();
    String jsonData = modelRequest.toString();
    HttpClient httpClient = HttpClient.newHttpClient();
    HttpRequest.Builder requestBuilder =
        getRequestBuilderDefault(uri)

@@ -325,8 +336,8 @@ public class OllamaAPI {
    int statusCode = response.statusCode();
    String responseBody = response.body();
    if (statusCode == 200) {
      EmbeddingResponse embeddingResponse =
          Utils.getObjectMapper().readValue(responseBody, EmbeddingResponse.class);
      OllamaEmbeddingResponseModel embeddingResponse =
          Utils.getObjectMapper().readValue(responseBody, OllamaEmbeddingResponseModel.class);
      return embeddingResponse.getEmbedding();
    } else {
      throw new OllamaBaseException(statusCode + " - " + responseBody);
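For orientation, a minimal sketch of how the new `generateEmbeddings(OllamaEmbeddingsRequestModel)` overload above could be called once this change is in place; the host URL and model name are placeholder assumptions, not part of the diff:

```java
import java.util.List;

import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;

public class EmbeddingsExample {
    public static void main(String[] args) throws Exception {
        // Assumed local Ollama endpoint; adjust host and model name to your setup.
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");

        // Build the request object introduced in this change and pass it to the new overload.
        OllamaEmbeddingsRequestModel request =
                new OllamaEmbeddingsRequestModel("llama2", "Why is the sky blue?");
        List<Double> embedding = ollamaAPI.generateEmbeddings(request);

        System.out.println("Embedding size: " + embedding.size());
    }
}
```

The existing two-argument generateEmbeddings(model, prompt) call keeps working; as shown in the hunk above, it now simply delegates to this overload.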
@@ -342,13 +353,24 @@ public class OllamaAPI {
   * @param options the Options object - <a
   *     href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More
   *     details on the options</a>
   * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false.
   * @return OllamaResult that includes response text and time taken for response
   */
  public OllamaResult generate(String model, String prompt, Options options)
  public OllamaResult generate(String model, String prompt, Options options, OllamaStreamHandler streamHandler)
      throws OllamaBaseException, IOException, InterruptedException {
    OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt);
    ollamaRequestModel.setOptions(options.getOptionsMap());
    return generateSyncForOllamaRequestModel(ollamaRequestModel);
    return generateSyncForOllamaRequestModel(ollamaRequestModel,streamHandler);
  }

  /**
   * Convenience method to call Ollama API without streaming responses.
   * 
   * Uses {@link #generate(String, String, Options, OllamaStreamHandler)}
   */
  public OllamaResult generate(String model, String prompt, Options options)
  throws OllamaBaseException, IOException, InterruptedException {
    return generate(model, prompt, options,null);
  }

  /**
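As a quick sketch of what this signature change means for callers, assuming a local Ollama endpoint and a placeholder model name (neither is part of the diff), the streaming overload and the convenience overload kept alongside it can be used like this:

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class GenerateOverloadsExample {
    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/"); // assumed local endpoint

        // Streaming overload: the handler receives the cumulative response text as it grows.
        OllamaStreamHandler handler = (s) -> System.out.println(s);
        OllamaResult streamed = ollamaAPI.generate("llama2", "Why is the sky blue?",
                new OptionsBuilder().build(), handler);

        // Convenience overload kept by this change: same call without a stream handler.
        OllamaResult blocking = ollamaAPI.generate("llama2", "Why is the sky blue?",
                new OptionsBuilder().build());

        System.out.println(streamed.getResponse());
        System.out.println(blocking.getResponse());
    }
}
```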
@@ -381,10 +403,11 @@ public class OllamaAPI {
   * @param options the Options object - <a
   *     href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More
   *     details on the options</a>
   * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false.
   * @return OllamaResult that includes response text and time taken for response
   */
  public OllamaResult generateWithImageFiles(
      String model, String prompt, List<File> imageFiles, Options options)
      String model, String prompt, List<File> imageFiles, Options options, OllamaStreamHandler streamHandler)
      throws OllamaBaseException, IOException, InterruptedException {
    List<String> images = new ArrayList<>();
    for (File imageFile : imageFiles) {

@@ -392,9 +415,20 @@ public class OllamaAPI {
    }
    OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images);
    ollamaRequestModel.setOptions(options.getOptionsMap());
    return generateSyncForOllamaRequestModel(ollamaRequestModel);
    return generateSyncForOllamaRequestModel(ollamaRequestModel,streamHandler);
  }

   /**
   * Convenience method to call Ollama API without streaming responses.
   * 
   * Uses {@link #generateWithImageFiles(String, String, List, Options, OllamaStreamHandler)}
   */
  public OllamaResult generateWithImageFiles(
    String model, String prompt, List<File> imageFiles, Options options)
    throws OllamaBaseException, IOException, InterruptedException{
      return generateWithImageFiles(model, prompt, imageFiles, options, null);
}

  /**
   * With one or more image URLs, ask a question to a model running on Ollama server. This is a
   * sync/blocking call.
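A minimal sketch of calling the streaming image-file variant above; the image model name and file path here are illustrative assumptions, while the test changes later in this diff stream the same call against a bundled dog-on-a-boat.jpg resource:

```java
import java.io.File;
import java.util.List;

import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class ImageQuestionExample {
    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/"); // assumed local endpoint

        // "llava" and the file path are placeholders; the handler prints each streamed update.
        OllamaResult result = ollamaAPI.generateWithImageFiles("llava",
                "What is in this image?",
                List.of(new File("/path/to/image.jpg")),
                new OptionsBuilder().build(),
                (s) -> System.out.println(s));

        System.out.println("Full response: " + result.getResponse());
    }
}
```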
@@ -405,10 +439,11 @@ public class OllamaAPI {
   * @param options the Options object - <a
   *     href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More
   *     details on the options</a>
   * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false.
   * @return OllamaResult that includes response text and time taken for response
   */
  public OllamaResult generateWithImageURLs(
      String model, String prompt, List<String> imageURLs, Options options)
      String model, String prompt, List<String> imageURLs, Options options, OllamaStreamHandler streamHandler)
      throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
    List<String> images = new ArrayList<>();
    for (String imageURL : imageURLs) {

@@ -416,7 +451,18 @@ public class OllamaAPI {
    }
    OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images);
    ollamaRequestModel.setOptions(options.getOptionsMap());
    return generateSyncForOllamaRequestModel(ollamaRequestModel);
    return generateSyncForOllamaRequestModel(ollamaRequestModel,streamHandler);
  }

  /**
   * Convenience method to call Ollama API without streaming responses.
   * 
   * Uses {@link #generateWithImageURLs(String, String, List, Options, OllamaStreamHandler)}
   */
  public OllamaResult generateWithImageURLs(String model, String prompt, List<String> imageURLs,
      Options options)
      throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
    return generateWithImageURLs(model, prompt, imageURLs, options, null);
  }

@@ -487,10 +533,19 @@ public class OllamaAPI {
    return Base64.getEncoder().encodeToString(bytes);
  }

  private OllamaResult generateSyncForOllamaRequestModel(OllamaGenerateRequestModel ollamaRequestModel)
  private OllamaResult generateSyncForOllamaRequestModel(
      OllamaGenerateRequestModel ollamaRequestModel, OllamaStreamHandler streamHandler)
      throws OllamaBaseException, IOException, InterruptedException {
        OllamaGenerateEndpointCaller requestCaller = new OllamaGenerateEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
        return requestCaller.callSync(ollamaRequestModel);
    OllamaGenerateEndpointCaller requestCaller =
        new OllamaGenerateEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
    OllamaResult result;
    if (streamHandler != null) {
      ollamaRequestModel.setStream(true);
      result = requestCaller.call(ollamaRequestModel, streamHandler);
    } else {
      result = requestCaller.callSync(ollamaRequestModel);
    }
    return result;
  }

  /**
@@ -1,6 +1,8 @@
package io.github.amithkoujalgi.ollama4j.core.models;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import lombok.Data;

@Data

@@ -34,4 +36,13 @@ public class Model {
    return name.split(":")[1];
  }

    @Override
  public String toString() {
    try {
      return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
    } catch (JsonProcessingException e) {
      throw new RuntimeException(e);
    }
  }

}
@@ -2,7 +2,8 @@ package io.github.amithkoujalgi.ollama4j.core.models;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Map;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import lombok.Data;

@Data

@@ -16,5 +17,14 @@ public class ModelDetail {
  private String parameters;
  private String template;
  private String system;
  private Map<String, String> details;
  private ModelMeta details;

    @Override
  public String toString() {
    try {
      return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
    } catch (JsonProcessingException e) {
      throw new RuntimeException(e);
    }
  }
}
@@ -2,6 +2,8 @@ package io.github.amithkoujalgi.ollama4j.core.models;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import lombok.Data;

@Data

@@ -21,4 +23,13 @@ public class ModelMeta {

  @JsonProperty("quantization_level")
  private String quantizationLevel;

    @Override
  public String toString() {
    try {
      return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
    } catch (JsonProcessingException e) {
      throw new RuntimeException(e);
    }
  }
}
@@ -2,10 +2,8 @@ package io.github.amithkoujalgi.ollama4j.core.models.chat;

import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;

import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
import lombok.NonNull;

public class OllamaChatStreamObserver {


@@ -13,7 +11,7 @@ public class OllamaChatStreamObserver {

    private List<OllamaChatResponseModel> responseParts = new ArrayList<>();

    private String message;
    private String message = "";

    public OllamaChatStreamObserver(OllamaStreamHandler streamHandler) {
        this.streamHandler = streamHandler;

@@ -25,8 +23,7 @@ public class OllamaChatStreamObserver {
    }
    
    protected void handleCurrentResponsePart(OllamaChatResponseModel currentResponsePart){
        List<@NonNull String> allResponsePartsByNow = responseParts.stream().map(r -> r.getMessage().getContent()).collect(Collectors.toList());
        message = String.join("", allResponsePartsByNow);
        message = message + currentResponsePart.getMessage().getContent();
        streamHandler.accept(message);
    }

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.models;
package io.github.amithkoujalgi.ollama4j.core.models.embeddings;

import com.fasterxml.jackson.annotation.JsonProperty;

@@ -7,7 +7,7 @@ import lombok.Data;

@SuppressWarnings("unused")
@Data
public class EmbeddingResponse {
public class OllamaEmbeddingResponseModel {
    @JsonProperty("embedding")
    private List<Double> embedding;
}
@@ -0,0 +1,31 @@
package io.github.amithkoujalgi.ollama4j.core.models.embeddings;

import io.github.amithkoujalgi.ollama4j.core.utils.Options;

public class OllamaEmbeddingsRequestBuilder {

    private OllamaEmbeddingsRequestBuilder(String model, String prompt){
        request = new OllamaEmbeddingsRequestModel(model, prompt);
    }

    private OllamaEmbeddingsRequestModel request;

    public static OllamaEmbeddingsRequestBuilder getInstance(String model, String prompt){
        return new OllamaEmbeddingsRequestBuilder(model, prompt);
    }

    public OllamaEmbeddingsRequestModel build(){
        return request;
    }

    public OllamaEmbeddingsRequestBuilder withOptions(Options options){
        this.request.setOptions(options.getOptionsMap());
        return this;
    }

    public OllamaEmbeddingsRequestBuilder withKeepAlive(String keepAlive){
        this.request.setKeepAlive(keepAlive);
        return this;
    }

}
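A small sketch of how this new builder is meant to be used; the model name, mirostat value, and keep-alive string below are illustrative assumptions (the unit tests later in this diff exercise the same getInstance/withOptions/build calls):

```java
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class EmbeddingsRequestBuilderExample {
    public static void main(String[] args) {
        // Chain optional settings onto the mandatory model/prompt pair, then build the request.
        OllamaEmbeddingsRequestModel request = OllamaEmbeddingsRequestBuilder
                .getInstance("llama2", "Why is the sky blue?")             // placeholder model name
                .withOptions(new OptionsBuilder().setMirostat(1).build())
                .withKeepAlive("5m")                                       // assumed keep-alive value
                .build();

        // toString() pretty-prints the JSON body that would be sent to /api/embeddings.
        System.out.println(request);
    }
}
```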
@@ -0,0 +1,33 @@
package io.github.amithkoujalgi.ollama4j.core.models.embeddings;

import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;

@Data
@RequiredArgsConstructor
@NoArgsConstructor
public class OllamaEmbeddingsRequestModel {
  @NonNull
  private String model;
  @NonNull
  private String prompt;

  protected Map<String, Object> options;
  @JsonProperty(value = "keep_alive")
  private String keepAlive;

  @Override
  public String toString() {
    try {
      return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
    } catch (JsonProcessingException e) {
      throw new RuntimeException(e);
    }
  }
}
@@ -0,0 +1,31 @@
package io.github.amithkoujalgi.ollama4j.core.models.generate;

import java.util.ArrayList;
import java.util.List;

import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;

public class OllamaGenerateStreamObserver {

    private OllamaStreamHandler streamHandler;

    private List<OllamaGenerateResponseModel> responseParts = new ArrayList<>();

    private String message = "";

    public OllamaGenerateStreamObserver(OllamaStreamHandler streamHandler) {
        this.streamHandler = streamHandler;
    }

    public void notify(OllamaGenerateResponseModel currentResponsePart){
        responseParts.add(currentResponsePart);
        handleCurrentResponsePart(currentResponsePart);
    }
    
    protected void handleCurrentResponsePart(OllamaGenerateResponseModel currentResponsePart){
        message = message + currentResponsePart.getResponse();
        streamHandler.accept(message);
    }


}
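Note that the observer hands the stream handler the cumulative text so far, not just the newest token chunk. A handler that only wants the newly received part therefore has to track what it has already seen, which is exactly what the streamed tests later in this diff do with a StringBuffer. A minimal sketch of that pattern, with the two accept calls simulating cumulative updates as the observer would deliver them (the handler itself is illustrative):

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;

public class DeltaPrintingHandlerExample {
    public static void main(String[] args) {
        StringBuffer seen = new StringBuffer();

        // Each callback carries the full response so far; print only the unseen suffix.
        OllamaStreamHandler handler = (s) -> {
            String delta = s.substring(seen.length());
            System.out.print(delta);
            seen.append(delta);
        };

        // Simulate two cumulative updates, as the observer above would deliver them.
        handler.accept("The capital");
        handler.accept("The capital of France is Paris.");
    }
}
```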
@@ -1,23 +0,0 @@
package io.github.amithkoujalgi.ollama4j.core.models.request;

import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;

import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.AllArgsConstructor;
import lombok.Data;

@Data
@AllArgsConstructor
public class ModelEmbeddingsRequest {
  private String model;
  private String prompt;

  @Override
  public String toString() {
    try {
      return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
    } catch (JsonProcessingException e) {
      throw new RuntimeException(e);
    }
  }
}
@@ -1,18 +1,25 @@
package io.github.amithkoujalgi.ollama4j.core.models.request;

import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.core.JsonProcessingException;

import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateStreamObserver;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;

public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller{

    private static final Logger LOG = LoggerFactory.getLogger(OllamaGenerateEndpointCaller.class);

    private OllamaGenerateStreamObserver streamObserver;

    public OllamaGenerateEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
        super(host, basicAuth, requestTimeoutSeconds, verbose);   
    }

@@ -27,6 +34,9 @@ public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller{
                try {
                    OllamaGenerateResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
                    responseBuffer.append(ollamaResponseModel.getResponse());
                    if(streamObserver != null) {
                        streamObserver.notify(ollamaResponseModel);
                    }
                    return ollamaResponseModel.isDone();
                } catch (JsonProcessingException e) {
                    LOG.error("Error parsing the Ollama chat response!",e);

@@ -34,7 +44,11 @@ public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller{
                }         
    }

    
    public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler)
        throws OllamaBaseException, IOException, InterruptedException {
    streamObserver = new OllamaGenerateStreamObserver(streamHandler);
    return super.callSync(body);
    }
    
    
}
@@ -4,11 +4,14 @@ import static org.junit.jupiter.api.Assertions.*;

import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
import java.io.File;
import java.io.IOException;
@@ -60,7 +63,7 @@ class TestRealAPIs {
    } catch (HttpConnectTimeoutException e) {
      fail(e.getMessage());
    } catch (Exception e) {
      throw new RuntimeException(e);
      fail(e);
    }
  }

@@ -72,7 +75,7 @@ class TestRealAPIs {
      assertNotNull(ollamaAPI.listModels());
      ollamaAPI.listModels().forEach(System.out::println);
    } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
      throw new RuntimeException(e);
      fail(e);
    }
  }

@@ -87,7 +90,20 @@ class TestRealAPIs {
              .anyMatch(model -> model.getModel().equalsIgnoreCase(config.getModel()));
      assertTrue(found);
    } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
      throw new RuntimeException(e);
      fail(e);
    }
  }

  @Test
  @Order(3)
  void testListDtails() {
    testEndpointReachability();
    try {
      ModelDetail modelDetails = ollamaAPI.getModelDetails(config.getModel());
      assertNotNull(modelDetails);
      System.out.println(modelDetails);
    } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
      fail(e);
    }
  }

@@ -105,7 +121,33 @@ class TestRealAPIs {
      assertNotNull(result.getResponse());
      assertFalse(result.getResponse().isEmpty());
    } catch (IOException | OllamaBaseException | InterruptedException e) {
      throw new RuntimeException(e);
      fail(e);
    }
  }

  @Test
  @Order(3)
  void testAskModelWithDefaultOptionsStreamed() {
    testEndpointReachability();
    try {

      StringBuffer sb = new StringBuffer("");

      OllamaResult result = ollamaAPI.generate(config.getModel(),
          "What is the capital of France? And what's France's connection with Mona Lisa?",
          new OptionsBuilder().build(), (s) -> {
            LOG.info(s);
            String substring = s.substring(sb.toString().length(), s.length());
            LOG.info(substring);
            sb.append(substring);
          });

      assertNotNull(result);
      assertNotNull(result.getResponse());
      assertFalse(result.getResponse().isEmpty());
      assertEquals(sb.toString().trim(), result.getResponse().trim());
    } catch (IOException | OllamaBaseException | InterruptedException e) {
      fail(e);
    }
  }

@@ -123,7 +165,7 @@ class TestRealAPIs {
      assertNotNull(result.getResponse());
      assertFalse(result.getResponse().isEmpty());
    } catch (IOException | OllamaBaseException | InterruptedException e) {
      throw new RuntimeException(e);
      fail(e);
    }
  }

@@ -143,7 +185,7 @@ class TestRealAPIs {
      assertFalse(chatResult.getResponse().isBlank());
      assertEquals(4,chatResult.getChatHistory().size());
    } catch (IOException | OllamaBaseException | InterruptedException e) {
      throw new RuntimeException(e);
      fail(e);
    }
  }

@@ -165,7 +207,7 @@ class TestRealAPIs {
      assertTrue(chatResult.getResponse().startsWith("NI"));
      assertEquals(3, chatResult.getChatHistory().size());
    } catch (IOException | OllamaBaseException | InterruptedException e) {
      throw new RuntimeException(e);
      fail(e);
    }
  }

@@ -183,14 +225,14 @@ class TestRealAPIs {

      OllamaChatResult chatResult = ollamaAPI.chat(requestModel,(s) -> {
        LOG.info(s);
        String substring = s.substring(sb.toString().length(), s.length()-1);
        String substring = s.substring(sb.toString().length(), s.length());
        LOG.info(substring);
        sb.append(substring);
      });
      assertNotNull(chatResult);
      assertEquals(sb.toString().trim(), chatResult.getResponse().trim());
    } catch (IOException | OllamaBaseException | InterruptedException e) {
      throw new RuntimeException(e);
      fail(e);
    }
  }

@@ -221,7 +263,7 @@ class TestRealAPIs {


    } catch (IOException | OllamaBaseException | InterruptedException e) {
      throw new RuntimeException(e);
      fail(e);
    }
  }

@@ -238,7 +280,7 @@ class TestRealAPIs {
      OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
      assertNotNull(chatResult);
    } catch (IOException | OllamaBaseException | InterruptedException e) {
      throw new RuntimeException(e);
      fail(e);
    }
  }

@@ -258,7 +300,31 @@ class TestRealAPIs {
      assertNotNull(result.getResponse());
      assertFalse(result.getResponse().isEmpty());
    } catch (IOException | OllamaBaseException | InterruptedException e) {
      throw new RuntimeException(e);
      fail(e);
    }
  }

  @Test
  @Order(3)
  void testAskModelWithOptionsAndImageFilesStreamed() {
    testEndpointReachability();
    File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg");
    try {
      StringBuffer sb = new StringBuffer("");

      OllamaResult result = ollamaAPI.generateWithImageFiles(config.getImageModel(),
          "What is in this image?", List.of(imageFile), new OptionsBuilder().build(), (s) -> {
            LOG.info(s);
            String substring = s.substring(sb.toString().length(), s.length());
            LOG.info(substring);
            sb.append(substring);
          });
      assertNotNull(result);
      assertNotNull(result.getResponse());
      assertFalse(result.getResponse().isEmpty());
      assertEquals(sb.toString().trim(), result.getResponse().trim());
    } catch (IOException | OllamaBaseException | InterruptedException e) {
      fail(e);
    }
  }

@@ -278,7 +344,24 @@ class TestRealAPIs {
      assertNotNull(result.getResponse());
      assertFalse(result.getResponse().isEmpty());
    } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
      throw new RuntimeException(e);
      fail(e);
    }
  }

  @Test
  @Order(3)
  public void testEmbedding() {
    testEndpointReachability();
    try {
      OllamaEmbeddingsRequestModel request = OllamaEmbeddingsRequestBuilder
          .getInstance(config.getModel(), "What is the capital of France?").build();

      List<Double> embeddings = ollamaAPI.generateEmbeddings(request);

      assertNotNull(embeddings);
      assertFalse(embeddings.isEmpty());
    } catch (IOException | OllamaBaseException | InterruptedException e) {
      fail(e);
    }
  }
}
@@ -0,0 +1,35 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;

public abstract class AbstractRequestSerializationTest<T> {

    protected ObjectMapper mapper = Utils.getObjectMapper();

    protected String serializeRequest(T req) {
        try {
            return mapper.writeValueAsString(req);
        } catch (JsonProcessingException e) {
            fail("Could not serialize request!", e);
            return null;
        }
    }

    protected T deserializeRequest(String jsonRequest, Class<T> requestClass) {
        try {
            return mapper.readValue(jsonRequest, requestClass);
        } catch (JsonProcessingException e) {
            fail("Could not deserialize jsonRequest!", e);
            return null;
        }
    }

    protected void assertEqualsAfterUnmarshalling(T unmarshalledRequest,
        T req) {
        assertEquals(req, unmarshalledRequest);
    }
}
@@ -1,7 +1,6 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;

import java.io.File;
import java.util.List;

@@ -10,21 +9,15 @@ import org.json.JSONObject;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;

import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;

public class TestChatRequestSerialization {
public class TestChatRequestSerialization extends AbstractRequestSerializationTest<OllamaChatRequestModel>{

    private OllamaChatRequestBuilder builder;

    private ObjectMapper mapper = Utils.getObjectMapper();

    @BeforeEach
    public void init() {
        builder = OllamaChatRequestBuilder.getInstance("DummyModel");
@@ -32,10 +25,9 @@ public class TestChatRequestSerialization {

    @Test
    public void testRequestOnlyMandatoryFields() {
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt",
                List.of(new File("src/test/resources/dog-on-a-boat.jpg"))).build();
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt").build();
        String jsonRequest = serializeRequest(req);
        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest), req);
        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req);
    }

    @Test

@@ -44,7 +36,7 @@ public class TestChatRequestSerialization {
        .withMessage(OllamaChatMessageRole.USER, "Some prompt")
        .build();
        String jsonRequest = serializeRequest(req);
        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest), req);
        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req);
    }

    @Test

@@ -52,7 +44,7 @@ public class TestChatRequestSerialization {
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt",
                List.of(new File("src/test/resources/dog-on-a-boat.jpg"))).build();
        String jsonRequest = serializeRequest(req);
        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest), req);
        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req);
    }

    @Test

@@ -62,7 +54,7 @@ public class TestChatRequestSerialization {
                .withOptions(b.setMirostat(1).build()).build();

        String jsonRequest = serializeRequest(req);
        OllamaChatRequestModel deserializeRequest = deserializeRequest(jsonRequest);
        OllamaChatRequestModel deserializeRequest = deserializeRequest(jsonRequest,OllamaChatRequestModel.class);
        assertEqualsAfterUnmarshalling(deserializeRequest, req);
        assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
    }
@@ -79,28 +71,4 @@ public class TestChatRequestSerialization {
        String requestFormatProperty = jsonObject.getString("format");
        assertEquals("json", requestFormatProperty);
    }

    private String serializeRequest(OllamaChatRequestModel req) {
        try {
            return mapper.writeValueAsString(req);
        } catch (JsonProcessingException e) {
            fail("Could not serialize request!", e);
            return null;
        }
    }

    private OllamaChatRequestModel deserializeRequest(String jsonRequest) {
        try {
            return mapper.readValue(jsonRequest, OllamaChatRequestModel.class);
        } catch (JsonProcessingException e) {
            fail("Could not deserialize jsonRequest!", e);
            return null;
        }
    }

    private void assertEqualsAfterUnmarshalling(OllamaChatRequestModel unmarshalledRequest,
            OllamaChatRequestModel req) {
        assertEquals(req, unmarshalledRequest);
    }

}
@@ -0,0 +1,37 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson;

import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class TestEmbeddingsRequestSerialization extends AbstractRequestSerializationTest<OllamaEmbeddingsRequestModel>{

        private OllamaEmbeddingsRequestBuilder builder;

        @BeforeEach
        public void init() {
            builder = OllamaEmbeddingsRequestBuilder.getInstance("DummyModel","DummyPrompt");
        }

            @Test
    public void testRequestOnlyMandatoryFields() {
        OllamaEmbeddingsRequestModel req = builder.build();
        String jsonRequest = serializeRequest(req);
        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaEmbeddingsRequestModel.class), req);
    }

        @Test
        public void testRequestWithOptions() {
            OptionsBuilder b = new OptionsBuilder();
            OllamaEmbeddingsRequestModel req = builder
                    .withOptions(b.setMirostat(1).build()).build();

            String jsonRequest = serializeRequest(req);
            OllamaEmbeddingsRequestModel deserializeRequest = deserializeRequest(jsonRequest,OllamaEmbeddingsRequestModel.class);
            assertEqualsAfterUnmarshalling(deserializeRequest, req);
            assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
        }
}
@@ -1,26 +1,20 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;

import org.json.JSONObject;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;

import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;

public class TestGenerateRequestSerialization {
public class TestGenerateRequestSerialization extends AbstractRequestSerializationTest<OllamaGenerateRequestModel>{

    private OllamaGenerateRequestBuilder builder;

    private ObjectMapper mapper = Utils.getObjectMapper();

    @BeforeEach
    public void init() {
        builder = OllamaGenerateRequestBuilder.getInstance("DummyModel");
@@ -31,7 +25,7 @@ public class TestGenerateRequestSerialization {
        OllamaGenerateRequestModel req = builder.withPrompt("Some prompt").build();

        String jsonRequest = serializeRequest(req);
        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest), req);
        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest, OllamaGenerateRequestModel.class), req);
    }

    @Test

@@ -41,7 +35,7 @@ public class TestGenerateRequestSerialization {
                builder.withPrompt("Some prompt").withOptions(b.setMirostat(1).build()).build();

        String jsonRequest = serializeRequest(req);
        OllamaGenerateRequestModel deserializeRequest = deserializeRequest(jsonRequest);
        OllamaGenerateRequestModel deserializeRequest = deserializeRequest(jsonRequest, OllamaGenerateRequestModel.class);
        assertEqualsAfterUnmarshalling(deserializeRequest, req);
        assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
    }
@@ -59,27 +53,4 @@ public class TestGenerateRequestSerialization {
        assertEquals("json", requestFormatProperty);
    }

    private String serializeRequest(OllamaGenerateRequestModel req) {
        try {
            return mapper.writeValueAsString(req);
        } catch (JsonProcessingException e) {
            fail("Could not serialize request!", e);
            return null;
        }
    }

    private OllamaGenerateRequestModel deserializeRequest(String jsonRequest) {
        try {
            return mapper.readValue(jsonRequest, OllamaGenerateRequestModel.class);
        } catch (JsonProcessingException e) {
            fail("Could not deserialize jsonRequest!", e);
            return null;
        }
    }

    private void assertEqualsAfterUnmarshalling(OllamaGenerateRequestModel unmarshalledRequest,
            OllamaGenerateRequestModel req) {
        assertEquals(req, unmarshalledRequest);
    }

}