updated docs
@@ -17,15 +17,16 @@ public class Main {

        OllamaAPI ollamaAPI = new OllamaAPI(host);

-        OllamaAsyncResultCallback ollamaAsyncResultCallback = ollamaAPI.askAsync(OllamaModelType.LLAMA2,
-                "Who are you?");
        String prompt = "Who are you?";

-        while (true) {
-            if (ollamaAsyncResultCallback.isComplete()) {
-                System.out.println(ollamaAsyncResultCallback.getResponse());
-                break;
        OllamaAsyncResultCallback callback = ollamaAPI.askAsync(OllamaModelType.LLAMA2, prompt);

        while (!callback.isComplete() || !callback.getStream().isEmpty()) {
            // poll for data from the response stream
            String result = callback.getStream().poll();
            if (result != null) {
                System.out.print(result);
            }
            // sleep briefly before polling again
            Thread.sleep(100);
        }
    }

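For readers who want to run the updated asynchronous example end to end, here is a minimal, self-contained sketch assembled from the hunk above. The import paths are assumptions based on the library's `io.github.amithkoujalgi` group id and may differ across versions.

```java
// Minimal sketch of the updated async usage shown in the hunk above.
// NOTE: the import package names are assumptions and may differ in your ollama4j version.
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaAsyncResultCallback;
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;

public class Main {

    public static void main(String[] args) throws Exception {

        String host = "http://localhost:11434/";
        OllamaAPI ollamaAPI = new OllamaAPI(host);

        String prompt = "Who are you?";

        // askAsync returns immediately; tokens are collected by the callback as they stream in
        OllamaAsyncResultCallback callback = ollamaAPI.askAsync(OllamaModelType.LLAMA2, prompt);

        // keep polling until generation is complete AND the stream has been fully drained
        while (!callback.isComplete() || !callback.getStream().isEmpty()) {
            String result = callback.getStream().poll();
            if (result != null) {
                System.out.print(result);
            }
            Thread.sleep(100);
        }
    }
}
```

Checking both `isComplete()` and the stream's emptiness ensures that tokens arriving just before completion are still printed.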
@@ -8,11 +8,13 @@ This API lets you ask questions to the LLMs in a synchronous way.
These APIs correlate to
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs.

## Try asking a question about the model.

```java
public class Main {

    public static void main(String[] args) {

        String host = "http://localhost:11434/";

        OllamaAPI ollamaAPI = new OllamaAPI(host);
@@ -30,4 +32,75 @@ You will get a response similar to:
> manner. I am trained on a massive dataset of text from the internet and can generate human-like responses to a wide
> range of topics and questions. I can be used to create chatbots, virtual assistants, and other applications that
> require natural language understanding and generation capabilities.

## Try asking a question about general topics.

```java
public class Main {

    public static void main(String[] args) {

        String host = "http://localhost:11434/";

        OllamaAPI ollamaAPI = new OllamaAPI(host);

        String prompt = "List all cricket world cup teams of 2019.";

        OllamaResult result = ollamaAPI.ask(OllamaModelType.LLAMA2, prompt);

        System.out.println(result.getResponse());
    }
}
```

You'd then get a response from the model:

> The 2019 ICC Cricket World Cup was held in England and Wales from May 30 to July 14, 2019. The
> following teams participated in the tournament:
>
> 1. Afghanistan
> 2. Australia
> 3. Bangladesh
> 4. England
> 5. India
> 6. New Zealand
> 7. Pakistan
> 8. South Africa
> 9. Sri Lanka
> 10. West Indies
>
> These teams competed in a round-robin format, with the top four teams advancing to the
> semi-finals. The tournament was won by the England cricket team, who defeated New Zealand in the final.

## Try asking for a database query for your data schema.

```java
public class Main {

    public static void main(String[] args) {
        String host = "http://localhost:11434/";
        OllamaAPI ollamaAPI = new OllamaAPI(host);

        String prompt = SamplePrompts.getSampleDatabasePromptWithQuestion(
                "List all customer names who have bought one or more products");
        OllamaResult result = ollamaAPI.ask(OllamaModelType.SQLCODER, prompt);
        System.out.println(result.getResponse());
    }
}
```

_Note: This example uses
a [sample prompt](https://github.com/amithkoujalgi/ollama4j/blob/main/src/main/resources/sample-db-prompt-template.txt)
bundled with this library, which contains a database schema, for demonstration purposes._

You'd then get a response from the model:

```sql
SELECT customers.name
FROM sales
         JOIN customers ON sales.customer_id = customers.customer_id
GROUP BY customers.name;
```
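
If you'd rather supply your own schema than use the bundled sample prompt, the idea is simply to inline the schema and the question into a single prompt string. Below is a minimal, hypothetical sketch; this helper is not part of ollama4j, and the prompt layout and schema are only illustrative.

```java
// Hypothetical helper, not part of ollama4j: builds a schema-aware prompt by hand.
public class DbPromptBuilder {

    public static String build(String schemaDdl, String question) {
        return "You are given the following database schema:\n\n"
                + schemaDdl + "\n\n"
                + "Write a SQL query to answer this question: " + question + "\n";
    }

    public static void main(String[] args) {
        // Illustrative schema only; replace with your own DDL.
        String schema = "CREATE TABLE customers (customer_id INT, name TEXT);\n"
                + "CREATE TABLE sales (sale_id INT, customer_id INT, product_id INT);";
        System.out.println(build(schema, "List all customer names who have bought one or more products"));
    }
}
```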
@@ -13,7 +13,7 @@ Let's get started with **Ollama4j**.
- **[Ollama](https://ollama.ai/download)**
- **[Oracle JDK](https://www.oracle.com/java/technologies/javase/jdk11-archive-downloads.html)** or
  **[Open JDK](https://jdk.java.net/archive/)** 11.0 or above.
-- **[Maven](https://maven.apache.org/download.cgi)** or **[Gradle](https://gradle.org/install/)**
- **[Maven](https://maven.apache.org/download.cgi)**

### Start Ollama server

@@ -43,9 +43,9 @@ The command runs the Ollama server locally at **http://localhost:11434/**.

### Setup your project

-Get started by **creating a new project** on your favorite IDE.
Get started by **creating a new Maven project** in your favorite IDE.

-If you're on Maven, add the dependency to your project's `pom.xml`.
Add the dependency to your project's `pom.xml`.

```xml

@@ -58,14 +58,33 @@ If you're on Maven, add the dependency to your project's `pom.xml`.

Find the latest version of the library [here](https://central.sonatype.com/artifact/io.github.amithkoujalgi/ollama4j).

-If you're on Gradle, add the following to your `build.gradle`:
-```gradle
-dependencies {
-    implementation 'io.github.amithkoujalgi:ollama4j:1.0.27'
-}
-```

You might want to include an implementation of the [SLF4J](https://www.slf4j.org/) logging API in your `pom.xml` file. For
example, use the `slf4j-jdk14` implementation:

```xml

<dependency>
    <groupId>org.slf4j</groupId>
    <artifactId>slf4j-jdk14</artifactId>
    <version>2.0.9</version> <!--Replace with appropriate version-->
</dependency>
```

or use the `logback-classic` implementation:

```xml

<dependency>
    <groupId>ch.qos.logback</groupId>
    <artifactId>logback-classic</artifactId>
    <version>1.3.11</version> <!--Replace with appropriate version-->
</dependency>
```

or use other suitable implementations.
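
The binding only needs to be on the classpath for the library's SLF4J output to show up somewhere; if you also want to log from your own code through the same API, a minimal sketch looks like this (the class name is just an example):

```java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Example of logging through the SLF4J API from your own code.
// The chosen binding (slf4j-jdk14, logback-classic, ...) decides where these messages end up.
public class LoggingExample {

    private static final Logger LOGGER = LoggerFactory.getLogger(LoggingExample.class);

    public static void main(String[] args) {
        LOGGER.info("Talking to Ollama at {}", "http://localhost:11434/");
    }
}
```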

Create a new Java class in your project and add this code.

```java

@@ -17,6 +17,7 @@ import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.http.HttpClient;
import java.net.http.HttpConnectTimeoutException;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
@@ -63,6 +64,39 @@ public class OllamaAPI {
    this.verbose = verbose;
  }

  /**
   * API to check the reachability of the Ollama server.
   *
   * @return true if the server is reachable, false otherwise.
   */
  public boolean ping() {
    String url = this.host + "/api/tags";
    HttpClient httpClient = HttpClient.newHttpClient();
    HttpRequest httpRequest = null;
    try {
      httpRequest =
          HttpRequest.newBuilder()
              .uri(new URI(url))
              .header("Accept", "application/json")
              .header("Content-type", "application/json")
              .timeout(Duration.ofSeconds(requestTimeoutSeconds))
              .GET()
              .build();
    } catch (URISyntaxException e) {
      throw new RuntimeException(e);
    }
    HttpResponse<String> response = null;
    try {
      response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString());
    } catch (HttpConnectTimeoutException e) {
      return false;
    } catch (IOException | InterruptedException e) {
      throw new RuntimeException(e);
    }
    int statusCode = response.statusCode();
    return statusCode == 200;
  }

  /**
   * List available models from the Ollama server.
   *

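The new `ping()` method can serve as a cheap pre-flight check before issuing any prompts. A minimal usage sketch, reusing the `OllamaAPI` setup from the docs above (the import path is an assumption and may differ in your version):

```java
// Minimal sketch: check server reachability before sending prompts.
// NOTE: the import package name is an assumption and may differ in your ollama4j version.
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;

public class PingCheck {

    public static void main(String[] args) {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");

        if (ollamaAPI.ping()) {
            System.out.println("Ollama server is up.");
        } else {
            System.out.println("Ollama server is not reachable (connection timed out).");
        }
    }
}
```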