Compare commits


6 Commits

Author SHA1 Message Date
amithkoujalgi    b15066a204    [maven-release-plugin] prepare release v1.0.40    2023-12-30 20:05:08 +00:00
Amith Koujalgi    e2b29b6a07    added Prompt Builder    2023-12-31 01:33:59 +05:30
amithkoujalgi    7470ebe846    [maven-release-plugin] prepare for next development iteration    2023-12-30 17:42:18 +00:00
amithkoujalgi    422efa68aa    [maven-release-plugin] prepare release v1.0.39    2023-12-30 17:42:17 +00:00
Amith Koujalgi    f4d8671922    updated docs    2023-12-30 23:10:50 +05:30
amithkoujalgi    70b136c9fc    [maven-release-plugin] prepare for next development iteration    2023-12-30 15:53:15 +00:00
12 changed files with 1343 additions and 18 deletions

View File

@@ -10,4 +10,16 @@ it:
list-releases:
	curl 'https://central.sonatype.com/api/internal/browse/component/versions?sortField=normalizedVersion&sortDirection=asc&page=0&size=12&filter=namespace%3Aio.github.amithkoujalgi%2Cname%3Aollama4j' \
		--compressed \
		--silent | jq '.components[].version'
build-docs:
	npm i --prefix docs && npm run build --prefix docs
start-docs:
	npm i --prefix docs && npm run start --prefix docs
start-cpu:
	docker run -it -v ~/ollama:/root/.ollama -p 11434:11434 ollama/ollama
start-gpu:
	docker run -it --gpus=all -v ~/ollama:/root/.ollama -p 11434:11434 ollama/ollama

View File

@@ -8,7 +8,7 @@ This API lets you ask questions along with the image files to the LLMs.
These APIs correlate to
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs.
-:::caution
+:::note
Executing this against an Ollama server running in CPU mode will take longer to generate a response, so GPU mode is
recommended.
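
As a rough illustration of the API this note refers to, here is a minimal sketch in Java. The method name `askWithImageFiles`, its parameter order, and the model name are assumptions based on the surrounding docs, not confirmed by this changeset:

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;

import java.io.File;
import java.util.List;

public class AskWithImagesExample {
    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");
        ollamaAPI.setRequestTimeoutSeconds(60);

        // Assumed signature: askWithImageFiles(model, imageFiles, prompt);
        // "llava" and the image path are placeholders for illustration only.
        OllamaResult result = ollamaAPI.askWithImageFiles(
                "llava",
                List.of(new File("/path/to/image.jpg")),
                "What is in this picture?");
        System.out.println(result.getResponse());
    }
}
```

On a CPU-only server the same call works, but as the note above says, the response takes noticeably longer.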

View File

@@ -8,7 +8,7 @@ This API lets you ask questions along with the image files to the LLMs.
These APIs correlate to
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs.
-:::caution
+:::note
Executing this against an Ollama server running in CPU mode will take longer to generate a response, so GPU mode is
recommended.

View File

@@ -1,5 +1,5 @@
---
-sidebar_position: 5
+sidebar_position: 6
---
# Generate Embeddings
@@ -30,17 +30,17 @@ public class Main {
You will get a response similar to:
-```json
+```javascript
[
0.5670403838157654,
0.009260174818336964,
0.23178744316101074,
-0.2916173040866852,
-0.8924556970596313,
0.8785552978515625,
-0.34576427936553955,
0.5742510557174683,
-0.04222835972905159,
-0.137906014919281
]
```
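
For context, here is a minimal sketch of how such a vector might be requested through ollama4j. The `generateEmbeddings` method name, its signature, and the model name are assumptions and should be checked against the actual API:

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;

import java.util.List;

public class GenerateEmbeddingsExample {
    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");

        // Assumed method: generateEmbeddings(model, prompt) returning the raw
        // embedding vector as a List<Double>.
        List<Double> embedding =
                ollamaAPI.generateEmbeddings("llama2", "Here is an article about llamas...");
        embedding.forEach(System.out::println);
    }
}
```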

View File

@@ -0,0 +1,73 @@
---
sidebar_position: 5
---
# Prompt Builder
This is designed for prompt engineering. It lets you easily build prompt text for zero-shot, one-shot, and few-shot inference.
```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
import io.github.amithkoujalgi.ollama4j.core.utils.PromptBuilder;
public class AskPhi {
public static void main(String[] args) throws Exception {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
ollamaAPI.setRequestTimeoutSeconds(10);
String model = OllamaModelType.PHI;
PromptBuilder promptBuilder =
new PromptBuilder()
.addLine("You are an expert coder and understand different programming languages.")
.addLine("Given a question, answer ONLY with code.")
.addLine("Produce clean, formatted and indented code in markdown format.")
.addLine(
"DO NOT include ANY extra text apart from code. Follow this instruction very strictly!")
.addLine("If there's any additional information you want to add, use comments within code.")
.addLine("Answer only in the programming language that has been asked for.")
.addSeparator()
.addLine("Example: Sum 2 numbers in Python")
.addLine("Answer:")
.addLine("```python")
.addLine("def sum(num1: int, num2: int) -> int:")
.addLine(" return num1 + num2")
.addLine("```")
.addSeparator()
.add("How do I read a file in Go and print its contents to stdout?");
OllamaResult response = ollamaAPI.ask(model, promptBuilder.build());
System.out.println(response.getResponse());
}
}
```
You will get a response similar to:
```go
package main

import (
	"fmt"
	"os"
)

// readFile reads the named file and prints its contents to stdout.
func readFile(fileName string) {
	data, err := os.ReadFile(fileName)
	if err != nil {
		fmt.Fprintln(os.Stderr, "Error reading file:", err.Error())
		return
	}
	fmt.Println(string(data))
}

func main() {
	readFile("file.txt")
}
```

View File

@@ -2,10 +2,38 @@
sidebar_position: 1
---
-# Intro
+# Introduction
Let's get started with **Ollama4j**.
## 🦙 What is Ollama?
[Ollama](https://ollama.ai/) is an advanced AI tool that allows users to easily set up and run large language models
locally (in CPU and GPU
modes). With Ollama, users can leverage powerful language models such as Llama 2 and even customize and create their own
models.
## 👨‍💻 Why Ollama4j?
Ollama4j was built for the simple purpose of integrating Ollama with Java applications.
```mermaid
flowchart LR
o4j[Ollama4j]
o[Ollama Server]
o4j -->|Communicates with| o;
m[Models]
p[Your Java Project]
subgraph Your Java Environment
direction TB
p -->|Uses| o4j
end
subgraph Ollama Setup
direction TB
o -->|Manages| m
end
```
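To make the flowchart concrete, here is a minimal sketch of the interaction it describes, reusing the `OllamaAPI#ask` call and the `PHI` model constant that appear elsewhere in this changeset; the host URL is the local default and the prompt is only illustrative:

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;

public class HelloOllama4j {
    public static void main(String[] args) throws Exception {
        // Your Java project uses Ollama4j, which talks to the Ollama server;
        // the server manages the models (as in the diagram above).
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");
        OllamaResult result = ollamaAPI.ask(OllamaModelType.PHI, "Why is the sky blue?");
        System.out.println(result.getResponse());
    }
}
```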
## Getting Started
### What you'll need

View File

@@ -131,8 +131,13 @@ const config = {
    prism: {
      theme: prismThemes.github,
      darkTheme: prismThemes.dracula,
      additionalLanguages: ['java'],
    },
  }),
  markdown: {
    mermaid: true,
  },
  themes: ['@docusaurus/theme-mermaid']
};
export default config;

docs/package-lock.json (generated, 1136 changed lines)

File diff suppressed because it is too large.

View File

@@ -16,6 +16,7 @@
"dependencies": {
"@docusaurus/core": "3.0.1",
"@docusaurus/preset-classic": "3.0.1",
"@docusaurus/theme-mermaid": "^3.0.1",
"@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0",
"prism-react-renderer": "^2.3.0",

View File

@@ -4,7 +4,7 @@
<groupId>io.github.amithkoujalgi</groupId>
<artifactId>ollama4j</artifactId>
-<version>1.0.38</version>
+<version>1.0.40</version>
<name>Ollama4j</name>
<description>Java library for interacting with Ollama API.</description>
@@ -39,7 +39,7 @@
<connection>scm:git:git@github.com:amithkoujalgi/ollama4j.git</connection>
<developerConnection>scm:git:https://github.com/amithkoujalgi/ollama4j.git</developerConnection>
<url>https://github.com/amithkoujalgi/ollama4j</url>
-<tag>v1.0.38</tag>
+<tag>v1.0.40</tag>
</scm>
<build>

View File

@@ -21,6 +21,7 @@ public class OllamaModelType {
public static final String VICUNA = "vicuna";
public static final String WIZARD_VICUNA_UNCENSORED = "wizard-vicuna-uncensored";
public static final String PHIND_CODELLAMA = "phind-codellama";
public static final String PHI = "phi";
public static final String ZEPHYR = "zephyr";
public static final String WIZARDCODER = "wizardcoder";
public static final String MISTRAL_OPENORCA = "mistral-openorca";

View File

@@ -0,0 +1,69 @@
package io.github.amithkoujalgi.ollama4j.core.utils;
/**
* The {@code PromptBuilder} class is used to construct prompt texts for large language models (LLMs). It
* provides methods for adding text, adding lines, adding separators, and building the final prompt.
*
* <p>Example usage:
*
* <pre>{@code
* PromptBuilder promptBuilder = new PromptBuilder();
* promptBuilder.add("This is a sample prompt for language models.")
* .addLine("You can add lines to provide context.")
* .addSeparator()
* .add("Feel free to customize as needed.");
* String finalPrompt = promptBuilder.build();
* System.out.println(finalPrompt);
* }</pre>
*/
public class PromptBuilder {
private final StringBuilder prompt;
/** Constructs a new {@code PromptBuilder} with an empty prompt. */
public PromptBuilder() {
this.prompt = new StringBuilder();
}
/**
* Appends the specified text to the prompt.
*
* @param text the text to be added to the prompt
* @return a reference to this {@code PromptBuilder} instance for method chaining
*/
public PromptBuilder add(String text) {
prompt.append(text);
return this;
}
/**
* Appends the specified text followed by a newline character to the prompt.
*
* @param text the text to be added as a line to the prompt
* @return a reference to this {@code PromptBuilder} instance for method chaining
*/
public PromptBuilder addLine(String text) {
prompt.append(text).append("\n");
return this;
}
/**
* Appends a separator line to the prompt. The separator is a newline followed by a line of
* dashes.
*
* @return a reference to this {@code PromptBuilder} instance for method chaining
*/
public PromptBuilder addSeparator() {
prompt.append("\n--------------------------------------------------\n");
return this;
}
/**
* Builds and returns the final prompt as a string.
*
* @return the final prompt as a string
*/
public String build() {
return prompt.toString();
}
}
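
As a usage illustration grounded in the class above, here is a short sketch of the string that `build()` produces when calls are chained; the expected output in the comment follows directly from the `addLine` and `addSeparator` implementations:

```java
import io.github.amithkoujalgi.ollama4j.core.utils.PromptBuilder;

public class PromptBuilderDemo {
    public static void main(String[] args) {
        String prompt = new PromptBuilder()
                .addLine("Answer ONLY with code.")   // text + "\n"
                .addSeparator()                      // "\n" + a line of dashes + "\n"
                .add("Sum 2 numbers in Python")      // text as-is, no trailing newline
                .build();
        System.out.println(prompt);
        // Output:
        // Answer ONLY with code.
        //
        // --------------------------------------------------
        // Sum 2 numbers in Python
    }
}
```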