updated docs

Amith Koujalgi 2023-12-26 20:15:43 +05:30
parent 16c39a0a28
commit df5c451a12
5 changed files with 147 additions and 376 deletions

README.md (362 lines changed)
File diff suppressed because one or more lines are too long

View File

@@ -17,15 +17,16 @@ public class Main {
         OllamaAPI ollamaAPI = new OllamaAPI(host);
-        OllamaAsyncResultCallback ollamaAsyncResultCallback = ollamaAPI.askAsync(OllamaModelType.LLAMA2,
-                "Who are you?");
-        while (true) {
-            if (ollamaAsyncResultCallback.isComplete()) {
-                System.out.println(ollamaAsyncResultCallback.getResponse());
-                break;
-            }
-            // introduce sleep to check for status with a time interval
+        String prompt = "Who are you?";
+        OllamaAsyncResultCallback callback = ollamaAPI.askAsync(OllamaModelType.LLAMA2, prompt);
+        while (!callback.isComplete() || !callback.getStream().isEmpty()) {
+            // poll for data from the response stream
+            String result = callback.getStream().poll();
+            if (result != null) {
+                System.out.print(result);
+            }
             Thread.sleep(100);
         }
     }
 }
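
Pieced together, the updated asynchronous example reads as a complete class roughly like the sketch below. It assumes, as the hunk above suggests, that `askAsync(model, prompt)` returns an `OllamaAsyncResultCallback` whose `getStream()` is a pollable queue of response text; treat it as an illustration rather than the definitive API.

```java
public class Main {

    public static void main(String[] args) throws Exception {
        String host = "http://localhost:11434/";
        OllamaAPI ollamaAPI = new OllamaAPI(host);

        String prompt = "Who are you?";
        OllamaAsyncResultCallback callback = ollamaAPI.askAsync(OllamaModelType.LLAMA2, prompt);

        // Keep polling until the request has completed and the stream is fully drained.
        while (!callback.isComplete() || !callback.getStream().isEmpty()) {
            String result = callback.getStream().poll(); // null when nothing new has arrived yet
            if (result != null) {
                System.out.print(result);
            }
            Thread.sleep(100); // check again after a short interval
        }
    }
}
```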

View File

@@ -8,11 +8,13 @@ This API lets you ask questions to the LLMs in a synchronous way.
 These APIs correlate to
 the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs.
 
+## Try asking a question about the model.
+
 ```java
 public class Main {
     public static void main(String[] args) {
         String host = "http://localhost:11434/";
         OllamaAPI ollamaAPI = new OllamaAPI(host);
@@ -30,4 +32,75 @@ You will get a response similar to:
> manner. I am trained on a massive dataset of text from the internet and can generate human-like responses to a wide
> range of topics and questions. I can be used to create chatbots, virtual assistants, and other applications that
> require
> natural language understanding and generation capabilities.

## Try asking a question from general topics.
```java
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
String prompt = "List all cricket world cup teams of 2019.";
OllamaResult result = ollamaAPI.ask(OllamaModelType.LLAMA2, prompt);
System.out.println(result.getResponse());
}
}
```
You'd then get a response from the model:
> The 2019 ICC Cricket World Cup was held in England and Wales from May 30 to July 14, 2019. The
> following teams
> participated in the tournament:
>
> 1. Afghanistan
> 2. Australia
> 3. Bangladesh
> 4. England
> 5. India
> 6. New Zealand
> 7. Pakistan
> 8. South Africa
> 9. Sri Lanka
> 10. West Indies
>
> These teams competed in a round-robin format, with the top four teams advancing to the
> semi-finals. The tournament was
> won by the England cricket team, who defeated New Zealand in the final.
## Try asking for a Database query for your data schema.
```java
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
String prompt = SamplePrompts.getSampleDatabasePromptWithQuestion(
"List all customer names who have bought one or more products");
OllamaResult result = ollamaAPI.ask(OllamaModelType.SQLCODER, prompt);
System.out.println(result.getResponse());
}
}
```
_Note: Here I've used
a [sample prompt](https://github.com/amithkoujalgi/ollama4j/blob/main/src/main/resources/sample-db-prompt-template.txt)
containing a database schema from within this library for demonstration purposes._
You'd then get a response from the model:
```sql
SELECT customers.name
FROM sales
JOIN customers ON sales.customer_id = customers.customer_id
GROUP BY customers.name;
```
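
The same pattern works for your own schema: build a prompt that carries the schema plus the question and pass it to `ask()` with the `SQLCODER` model. The bundled sample prompt's exact wording is not shown here, so the schema and prompt strings below are illustrative placeholders, not the library's template:

```java
public class Main {

    public static void main(String[] args) throws Exception {
        String host = "http://localhost:11434/";
        OllamaAPI ollamaAPI = new OllamaAPI(host);

        // Hypothetical schema; replace with your own DDL.
        String schema = "CREATE TABLE customers (customer_id INT PRIMARY KEY, name VARCHAR(100));\n"
                + "CREATE TABLE sales (sale_id INT PRIMARY KEY, customer_id INT, product_id INT);\n";
        String question = "List all customer names who have bought one or more products";

        // Same idea as the sample prompt: schema first, then the question to answer.
        String prompt = "Given the following database schema:\n" + schema
                + "\nWrite an SQL query to answer this question: " + question;

        OllamaResult result = ollamaAPI.ask(OllamaModelType.SQLCODER, prompt);
        System.out.println(result.getResponse());
    }
}
```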

View File

@@ -13,7 +13,7 @@ Let's get started with **Ollama4j**.
 - **[Ollama](https://ollama.ai/download)**
 - **[Oracle JDK](https://www.oracle.com/java/technologies/javase/jdk11-archive-downloads.html)** or
   **[Open JDK](https://jdk.java.net/archive/)** 11.0 or above.
-- **[Maven](https://maven.apache.org/download.cgi)** or **[Gradle](https://gradle.org/install/)**
+- **[Maven](https://maven.apache.org/download.cgi)**
 
 ### Start Ollama server
 
@@ -43,9 +43,9 @@ The command runs the Ollama server locally at **http://localhost:11434/**.
 ### Setup your project
 
-Get started by **creating a new project** on your favorite IDE.
+Get started by **creating a new Maven project** on your favorite IDE.
 
-If you're on Maven, add the dependency to your project's `pom.xml`.
+Add the dependency to your project's `pom.xml`.
 
 ```xml
@@ -58,14 +58,33 @@ If you're on Maven, add the dependency to your project's `pom.xml`.
 Find the latest version of the library [here](https://central.sonatype.com/artifact/io.github.amithkoujalgi/ollama4j).
 
-If you're on Gradle, add the following to your `build.gradle`:
+You might want to include an [SLF4J](https://www.slf4j.org/) logger implementation in your `pom.xml` file.
+
+For example, use the `slf4j-jdk14` implementation:
 
-```gradle
-dependencies {
-    implementation 'io.github.amithkoujalgi:ollama4j:1.0.27'
-}
+```xml
+<dependency>
+    <groupId>org.slf4j</groupId>
+    <artifactId>slf4j-jdk14</artifactId>
+    <version>2.0.9</version> <!--Replace with appropriate version-->
+</dependency>
 ```
+
+or use the `logback-classic` implementation:
+
+```xml
+<dependency>
+    <groupId>ch.qos.logback</groupId>
+    <artifactId>logback-classic</artifactId>
+    <version>1.3.11</version> <!--Replace with appropriate version-->
+</dependency>
+```
+
+or use other suitable implementations.
 
 Create a new Java class in your project and add this code.
 
 ```java

View File

@@ -17,6 +17,7 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
 import java.net.http.HttpClient;
+import java.net.http.HttpConnectTimeoutException;
 import java.net.http.HttpRequest;
 import java.net.http.HttpResponse;
 import java.nio.charset.StandardCharsets;
@@ -63,6 +64,39 @@ public class OllamaAPI {
        this.verbose = verbose;
    }

    /**
     * API to check the reachability of Ollama server.
     *
     * @return true if the server is reachable, false otherwise.
     */
    public boolean ping() {
        String url = this.host + "/api/tags";
        HttpClient httpClient = HttpClient.newHttpClient();
        HttpRequest httpRequest = null;
        try {
            httpRequest =
                    HttpRequest.newBuilder()
                            .uri(new URI(url))
                            .header("Accept", "application/json")
                            .header("Content-type", "application/json")
                            .timeout(Duration.ofSeconds(requestTimeoutSeconds))
                            .GET()
                            .build();
        } catch (URISyntaxException e) {
            throw new RuntimeException(e);
        }
        HttpResponse<String> response = null;
        try {
            response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString());
        } catch (HttpConnectTimeoutException e) {
            return false;
        } catch (IOException | InterruptedException e) {
            throw new RuntimeException(e);
        }
        int statusCode = response.statusCode();
        return statusCode == 200;
    }

    /**
     * List available models from Ollama server.
     *
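
For reference, calling the new `ping()` API from application code would look much like the other snippets in these docs (imports omitted in the same way); this is a usage sketch, and the printed message is my own wording:

```java
public class Main {

    public static void main(String[] args) {
        String host = "http://localhost:11434/";
        OllamaAPI ollamaAPI = new OllamaAPI(host);

        // ping() returns true only when GET <host>/api/tags answers with HTTP 200;
        // a connect timeout is reported as false rather than thrown as an exception.
        boolean reachable = ollamaAPI.ping();
        System.out.println("Ollama server reachable: " + reachable);
    }
}
```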