forked from Mirror/ollama4j

Compare commits

11 Commits

| SHA1 |
|---|
| 455251d1d4 |
| ec00ffae7f |
| d969c7ad46 |
| 02bf769188 |
| 1c8a6b4f2a |
| 60fe5d6ffb |
| 327ae7437f |
| 795b9f2b9b |
| 54da069e68 |
| bfc5cebac1 |
| d46b1d48d8 |
@@ -67,7 +67,7 @@ In your Maven project, add this dependency:
 <dependency>
     <groupId>io.github.amithkoujalgi</groupId>
     <artifactId>ollama4j</artifactId>
-    <version>1.0.29</version>
+    <version>1.0.47</version>
 </dependency>
 ```
 
@@ -2,7 +2,7 @@
 sidebar_position: 2
 ---
 
-# Ask - Async
+# Generate - Async
 
 This API lets you ask questions to the LLMs in a asynchronous way.
 These APIs correlate to
@@ -19,13 +19,13 @@ public class Main {
 
         String prompt = "Who are you?";
 
-        OllamaAsyncResultCallback callback = ollamaAPI.askAsync(OllamaModelType.LLAMA2, prompt);
+        OllamaAsyncResultCallback callback = ollamaAPI.generateAsync(OllamaModelType.LLAMA2, prompt);
 
         while (!callback.isComplete() || !callback.getStream().isEmpty()) {
             // poll for data from the response stream
             String result = callback.getStream().poll();
-            if (response != null) {
-                System.out.print(result.getResponse());
+            if (result != null) {
+                System.out.print(result);
             }
             Thread.sleep(100);
         }
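For reference, the renamed async call end to end. This is a minimal sketch assembled from the snippet above; the host URL is a placeholder and imports from the ollama4j package are omitted:

```java
// Minimal sketch: host URL is an assumption, imports omitted.
public class Main {

    public static void main(String[] args) throws Exception {
        String host = "http://localhost:11434/"; // assumed Ollama host
        OllamaAPI ollamaAPI = new OllamaAPI(host);

        String prompt = "Who are you?";

        // generateAsync() is the renamed askAsync(); it returns a callback handle immediately.
        OllamaAsyncResultCallback callback = ollamaAPI.generateAsync(OllamaModelType.LLAMA2, prompt);

        // Poll the response stream until the call completes and the queue is drained.
        while (!callback.isComplete() || !callback.getStream().isEmpty()) {
            String result = callback.getStream().poll();
            if (result != null) {
                System.out.print(result);
            }
            Thread.sleep(100);
        }
    }
}
```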
@@ -2,7 +2,7 @@
 sidebar_position: 3
 ---
 
-# Ask - With Image Files
+# Generate - With Image Files
 
 This API lets you ask questions along with the image files to the LLMs.
 These APIs correlate to
@@ -15,7 +15,7 @@ recommended.
 
 :::
 
-## Ask (Sync)
+## Synchronous mode
 
 If you have this image downloaded and you pass the path to the downloaded image to the following code:
 
@@ -29,7 +29,7 @@ public class Main {
         OllamaAPI ollamaAPI = new OllamaAPI(host);
         ollamaAPI.setRequestTimeoutSeconds(10);
 
-        OllamaResult result = ollamaAPI.askWithImageFiles(OllamaModelType.LLAVA,
+        OllamaResult result = ollamaAPI.generateWithImageFiles(OllamaModelType.LLAVA,
                 "What's in this image?",
                 List.of(
                         new File("/path/to/image")));
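A minimal sketch of the renamed image-file call as a full example. The model, prompt, and timeout come from the snippet above; the host URL and image path are placeholders, imports are omitted, and the trailing Options argument follows the generateWithImageFiles signature shown later in this diff:

```java
// Minimal sketch: host URL and image path are assumptions, imports omitted.
public class Main {

    public static void main(String[] args) throws Exception {
        String host = "http://localhost:11434/"; // assumed Ollama host
        OllamaAPI ollamaAPI = new OllamaAPI(host);
        ollamaAPI.setRequestTimeoutSeconds(10);

        // generateWithImageFiles() is the renamed askWithImageFiles(); the Options
        // argument mirrors the signature shown in the OllamaAPI.java hunk below.
        OllamaResult result = ollamaAPI.generateWithImageFiles(
                OllamaModelType.LLAVA,
                "What's in this image?",
                List.of(new File("/path/to/image")),
                new OptionsBuilder().build());

        System.out.println(result.getResponse());
    }
}
```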
@@ -2,7 +2,7 @@
 sidebar_position: 4
 ---
 
-# Ask - With Image URLs
+# Generate - With Image URLs
 
 This API lets you ask questions along with the image files to the LLMs.
 These APIs correlate to
@@ -29,7 +29,7 @@ public class Main {
         OllamaAPI ollamaAPI = new OllamaAPI(host);
         ollamaAPI.setRequestTimeoutSeconds(10);
 
-        OllamaResult result = ollamaAPI.askWithImageURLs(OllamaModelType.LLAVA,
+        OllamaResult result = ollamaAPI.generateWithImageURLs(OllamaModelType.LLAVA,
                 "What's in this image?",
                 List.of(
                         "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"));
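The URL-based variant, sketched the same way (host URL is a placeholder, imports omitted; the Options argument again follows the signature shown later in this diff):

```java
// Minimal sketch: host URL is an assumption, imports omitted.
public class Main {

    public static void main(String[] args) throws Exception {
        String host = "http://localhost:11434/"; // assumed Ollama host
        OllamaAPI ollamaAPI = new OllamaAPI(host);
        ollamaAPI.setRequestTimeoutSeconds(10);

        // generateWithImageURLs() is the renamed askWithImageURLs().
        OllamaResult result = ollamaAPI.generateWithImageURLs(
                OllamaModelType.LLAVA,
                "What's in this image?",
                List.of("https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"),
                new OptionsBuilder().build());

        System.out.println(result.getResponse());
    }
}
```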
@@ -2,7 +2,7 @@
 sidebar_position: 1
 ---
 
-# Ask - Sync
+# Generate - Sync
 
 This API lets you ask questions to the LLMs in a synchronous way.
 These APIs correlate to
@@ -25,7 +25,7 @@ public class Main {
         OllamaAPI ollamaAPI = new OllamaAPI(host);
 
         OllamaResult result =
-                ollamaAPI.ask(OllamaModelType.LLAMA2, "Who are you?", new OptionsBuilder().build());
+                ollamaAPI.generate(OllamaModelType.LLAMA2, "Who are you?", new OptionsBuilder().build());
 
         System.out.println(result.getResponse());
     }
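For reference, the renamed blocking call end to end. A minimal sketch assembled from the snippet above; the host URL is a placeholder and imports are omitted:

```java
// Minimal sketch: host URL is an assumption, imports omitted.
public class Main {

    public static void main(String[] args) throws Exception {
        String host = "http://localhost:11434/"; // assumed Ollama host
        OllamaAPI ollamaAPI = new OllamaAPI(host);

        // generate() is the renamed ask(); it blocks until the full response is available.
        OllamaResult result =
                ollamaAPI.generate(OllamaModelType.LLAMA2, "Who are you?", new OptionsBuilder().build());

        System.out.println(result.getResponse());
    }
}
```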
@@ -55,7 +55,7 @@ public class Main {
         String prompt = "List all cricket world cup teams of 2019.";
 
         OllamaResult result =
-                ollamaAPI.ask(OllamaModelType.LLAMA2, prompt, new OptionsBuilder().build());
+                ollamaAPI.generate(OllamaModelType.LLAMA2, prompt, new OptionsBuilder().build());
 
         System.out.println(result.getResponse());
     }
@@ -97,7 +97,7 @@ public class Main {
                 SamplePrompts.getSampleDatabasePromptWithQuestion(
                         "List all customer names who have bought one or more products");
         OllamaResult result =
-                ollamaAPI.ask(OllamaModelType.SQLCODER, prompt, new OptionsBuilder().build());
+                ollamaAPI.generate(OllamaModelType.SQLCODER, prompt, new OptionsBuilder().build());
         System.out.println(result.getResponse());
     }
 }
@@ -42,7 +42,7 @@ public class AskPhi {
                         .addSeparator()
                         .add("How do I read a file in Go and print its contents to stdout?");
 
-        OllamaResult response = ollamaAPI.ask(model, promptBuilder.build());
+        OllamaResult response = ollamaAPI.generate(model, promptBuilder.build());
         System.out.println(response.getResponse());
     }
 }
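A hedged sketch of the renamed call with a prompt built via PromptBuilder. Only add(), addSeparator(), build(), and the two-argument generate() call are taken from the snippet above; the no-arg PromptBuilder constructor, the first prompt line, the host URL, and the model tag are assumptions:

```java
// Hedged sketch: the no-arg PromptBuilder constructor, first prompt line, host,
// and model tag are assumptions; the rest mirrors the AskPhi snippet above.
public class AskPhi {

    public static void main(String[] args) throws Exception {
        String host = "http://localhost:11434/"; // assumed Ollama host
        OllamaAPI ollamaAPI = new OllamaAPI(host);
        String model = "phi"; // assumed model tag

        PromptBuilder promptBuilder =
                new PromptBuilder()
                        .add("You are a helpful coding assistant.") // placeholder first line
                        .addSeparator()
                        .add("How do I read a file in Go and print its contents to stdout?");

        // Two-argument generate(), as used in the updated doc snippet above.
        OllamaResult response = ollamaAPI.generate(model, promptBuilder.build());
        System.out.println(response.getResponse());
    }
}
```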
@@ -79,6 +79,7 @@ const config = {
                         label: 'Docs',
                     },
                     {to: 'https://amithkoujalgi.github.io/ollama4j/apidocs/', label: 'Javadoc', position: 'left'},
+                    {to: 'https://amithkoujalgi.github.io/ollama4j/doxygen/html/', label: 'Doxygen', position: 'left'},
                     {to: '/blog', label: 'Blog', position: 'left'},
                     {
                         href: 'https://github.com/amithkoujalgi/ollama4j',

pom.xml (4 changes)
@@ -4,7 +4,7 @@
 
     <groupId>io.github.amithkoujalgi</groupId>
     <artifactId>ollama4j</artifactId>
-    <version>1.0.45</version>
+    <version>1.0.48</version>
 
     <name>Ollama4j</name>
     <description>Java library for interacting with Ollama API.</description>
@@ -39,7 +39,7 @@
         <connection>scm:git:git@github.com:amithkoujalgi/ollama4j.git</connection>
         <developerConnection>scm:git:https://github.com/amithkoujalgi/ollama4j.git</developerConnection>
         <url>https://github.com/amithkoujalgi/ollama4j</url>
-        <tag>v1.0.45</tag>
+        <tag>v1.0.48</tag>
     </scm>
 
     <build>
@@ -329,7 +329,8 @@ public class OllamaAPI {
   }
 
   /**
-   * Ask a question to a model running on Ollama server. This is a sync/blocking call.
+   * Generate response for a question to a model running on Ollama server. This is a sync/blocking
+   * call.
    *
    * @param model the ollama model to ask the question to
    * @param prompt the prompt/question text
@@ -338,23 +339,23 @@ public class OllamaAPI {
    *     details on the options</a>
    * @return OllamaResult that includes response text and time taken for response
    */
-  public OllamaResult ask(String model, String prompt, Options options)
+  public OllamaResult generate(String model, String prompt, Options options)
       throws OllamaBaseException, IOException, InterruptedException {
     OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt);
     ollamaRequestModel.setOptions(options.getOptionsMap());
-    return askSync(ollamaRequestModel);
+    return generateSync(ollamaRequestModel);
   }
 
   /**
-   * Ask a question to a model running on Ollama server and get a callback handle that can be used
-   * to check for status and get the response from the model later. This would be an
-   * async/non-blocking call.
+   * Generate response for a question to a model running on Ollama server and get a callback handle
+   * that can be used to check for status and get the response from the model later. This would be
+   * an async/non-blocking call.
    *
    * @param model the ollama model to ask the question to
    * @param prompt the prompt/question text
    * @return the ollama async result callback handle
    */
-  public OllamaAsyncResultCallback askAsync(String model, String prompt) {
+  public OllamaAsyncResultCallback generateAsync(String model, String prompt) {
     OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt);
 
     URI uri = URI.create(this.host + "/api/generate");
@@ -377,7 +378,7 @@ public class OllamaAPI {
    *     details on the options</a>
    * @return OllamaResult that includes response text and time taken for response
    */
-  public OllamaResult askWithImageFiles(
+  public OllamaResult generateWithImageFiles(
       String model, String prompt, List<File> imageFiles, Options options)
       throws OllamaBaseException, IOException, InterruptedException {
     List<String> images = new ArrayList<>();
@@ -386,7 +387,7 @@ public class OllamaAPI {
     }
     OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt, images);
     ollamaRequestModel.setOptions(options.getOptionsMap());
-    return askSync(ollamaRequestModel);
+    return generateSync(ollamaRequestModel);
   }
 
   /**
@@ -401,7 +402,7 @@ public class OllamaAPI {
    *     details on the options</a>
    * @return OllamaResult that includes response text and time taken for response
    */
-  public OllamaResult askWithImageURLs(
+  public OllamaResult generateWithImageURLs(
       String model, String prompt, List<String> imageURLs, Options options)
       throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
     List<String> images = new ArrayList<>();
@@ -410,7 +411,7 @@ public class OllamaAPI {
     }
     OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt, images);
     ollamaRequestModel.setOptions(options.getOptionsMap());
-    return askSync(ollamaRequestModel);
+    return generateSync(ollamaRequestModel);
   }
 
   private static String encodeFileToBase64(File file) throws IOException {
@@ -435,7 +436,7 @@ public class OllamaAPI {
     }
   }
 
-  private OllamaResult askSync(OllamaRequestModel ollamaRequestModel)
+  private OllamaResult generateSync(OllamaRequestModel ollamaRequestModel)
       throws OllamaBaseException, IOException, InterruptedException {
     long startTime = System.currentTimeMillis();
     HttpClient httpClient = HttpClient.newHttpClient();
@@ -101,7 +101,7 @@ class TestRealAPIs {
     testEndpointReachability();
     try {
       OllamaResult result =
-          ollamaAPI.ask(
+          ollamaAPI.generate(
               OllamaModelType.LLAMA2,
               "What is the capital of France? And what's France's connection with Mona Lisa?",
               new OptionsBuilder().build());
@@ -119,7 +119,7 @@ class TestRealAPIs {
     testEndpointReachability();
     try {
       OllamaResult result =
-          ollamaAPI.ask(
+          ollamaAPI.generate(
               OllamaModelType.LLAMA2,
               "What is the capital of France? And what's France's connection with Mona Lisa?",
               new OptionsBuilder().setTemperature(0.9f).build());
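As a usage note, the temperature override exercised in this test is a single builder call on OptionsBuilder. A short sketch with an assumed host (prompt and 0.9f value are taken from the test above, imports omitted):

```java
// Short sketch: assumed host; prompt and temperature value come from the test above.
OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");
OllamaResult result =
        ollamaAPI.generate(
                OllamaModelType.LLAMA2,
                "What is the capital of France? And what's France's connection with Mona Lisa?",
                new OptionsBuilder().setTemperature(0.9f).build());
System.out.println(result.getResponse());
```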
@@ -138,7 +138,7 @@ class TestRealAPIs {
     File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg");
     try {
       OllamaResult result =
-          ollamaAPI.askWithImageFiles(
+          ollamaAPI.generateWithImageFiles(
               OllamaModelType.LLAVA,
               "What is in this image?",
               List.of(imageFile),
@@ -157,7 +157,7 @@ class TestRealAPIs {
     testEndpointReachability();
     try {
       OllamaResult result =
-          ollamaAPI.askWithImageURLs(
+          ollamaAPI.generateWithImageURLs(
               OllamaModelType.LLAVA,
               "What is in this image?",
               List.of(
@@ -103,10 +103,10 @@ class TestMockedAPIs {
     String prompt = "some prompt text";
     OptionsBuilder optionsBuilder = new OptionsBuilder();
     try {
-      when(ollamaAPI.ask(model, prompt, optionsBuilder.build()))
+      when(ollamaAPI.generate(model, prompt, optionsBuilder.build()))
           .thenReturn(new OllamaResult("", 0, 200));
-      ollamaAPI.ask(model, prompt, optionsBuilder.build());
-      verify(ollamaAPI, times(1)).ask(model, prompt, optionsBuilder.build());
+      ollamaAPI.generate(model, prompt, optionsBuilder.build());
+      verify(ollamaAPI, times(1)).generate(model, prompt, optionsBuilder.build());
     } catch (IOException | OllamaBaseException | InterruptedException e) {
       throw new RuntimeException(e);
     }
@@ -118,13 +118,14 @@ class TestMockedAPIs {
     String model = OllamaModelType.LLAMA2;
     String prompt = "some prompt text";
     try {
-      when(ollamaAPI.askWithImageFiles(
+      when(ollamaAPI.generateWithImageFiles(
              model, prompt, Collections.emptyList(), new OptionsBuilder().build()))
           .thenReturn(new OllamaResult("", 0, 200));
-      ollamaAPI.askWithImageFiles(
+      ollamaAPI.generateWithImageFiles(
           model, prompt, Collections.emptyList(), new OptionsBuilder().build());
       verify(ollamaAPI, times(1))
-          .askWithImageFiles(model, prompt, Collections.emptyList(), new OptionsBuilder().build());
+          .generateWithImageFiles(
+              model, prompt, Collections.emptyList(), new OptionsBuilder().build());
     } catch (IOException | OllamaBaseException | InterruptedException e) {
       throw new RuntimeException(e);
     }
@@ -136,13 +137,14 @@ class TestMockedAPIs {
     String model = OllamaModelType.LLAMA2;
     String prompt = "some prompt text";
     try {
-      when(ollamaAPI.askWithImageURLs(
+      when(ollamaAPI.generateWithImageURLs(
              model, prompt, Collections.emptyList(), new OptionsBuilder().build()))
           .thenReturn(new OllamaResult("", 0, 200));
-      ollamaAPI.askWithImageURLs(
+      ollamaAPI.generateWithImageURLs(
           model, prompt, Collections.emptyList(), new OptionsBuilder().build());
       verify(ollamaAPI, times(1))
-          .askWithImageURLs(model, prompt, Collections.emptyList(), new OptionsBuilder().build());
+          .generateWithImageURLs(
+              model, prompt, Collections.emptyList(), new OptionsBuilder().build());
     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
       throw new RuntimeException(e);
     }
@@ -153,9 +155,9 @@ class TestMockedAPIs {
     OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
     String model = OllamaModelType.LLAMA2;
     String prompt = "some prompt text";
-    when(ollamaAPI.askAsync(model, prompt))
+    when(ollamaAPI.generateAsync(model, prompt))
         .thenReturn(new OllamaAsyncResultCallback(null, null, 3));
-    ollamaAPI.askAsync(model, prompt);
-    verify(ollamaAPI, times(1)).askAsync(model, prompt);
+    ollamaAPI.generateAsync(model, prompt);
+    verify(ollamaAPI, times(1)).generateAsync(model, prompt);
   }
 }