mirror of https://github.com/amithkoujalgi/ollama4j.git
synced 2025-05-15 03:47:13 +02:00

updated docs

This commit is contained in:
parent 70b136c9fc
commit f4d8671922

8  Makefile
@@ -10,4 +10,10 @@ it:
 list-releases:
 	curl 'https://central.sonatype.com/api/internal/browse/component/versions?sortField=normalizedVersion&sortDirection=asc&page=0&size=12&filter=namespace%3Aio.github.amithkoujalgi%2Cname%3Aollama4j' \
 	--compressed \
 	--silent | jq '.components[].version'
+
+build-docs:
+	npm i --prefix docs && npm run build --prefix docs
+
+start-docs:
+	npm i --prefix docs && npm run start --prefix docs
@@ -8,7 +8,7 @@ This API lets you ask questions along with the image files to the LLMs.
 These APIs correlate to
 the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs.
 
-:::caution
+:::note
 
 Executing this on Ollama server running in CPU-mode will take longer to generate response. Hence, GPU-mode is
 recommended.
@@ -8,7 +8,7 @@ This API lets you ask questions along with the image files to the LLMs.
 These APIs correlate to
 the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs.
 
-:::caution
+:::note
 
 Executing this on Ollama server running in CPU-mode will take longer to generate response. Hence, GPU-mode is
 recommended.
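Both hunks above soften a warning on the image-prompt doc pages. For orientation, those pages wrap Ollama's completion endpoint, which accepts base64-encoded images. The sketch below is not ollama4j code; it is a minimal raw-HTTP illustration with Java's built-in HttpClient, assuming a local Ollama server on the default port 11434, a multimodal model such as llava already pulled, and a hypothetical image file dog.jpg.

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Base64;

public class AskWithImageSketch {
    public static void main(String[] args) throws Exception {
        // Ollama's /api/generate accepts images as base64 strings in an "images" array.
        String imageB64 = Base64.getEncoder()
                .encodeToString(Files.readAllBytes(Path.of("dog.jpg"))); // hypothetical file

        // "llava" is an assumption: any multimodal model pulled on the server will do.
        String body = """
                {"model": "llava",
                 "prompt": "What is in this picture?",
                 "images": ["%s"],
                 "stream": false}""".formatted(imageB64);

        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:11434/api/generate"))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(body))
                .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());

        // The JSON reply carries the model's answer in its "response" field.
        System.out.println(response.body());
    }
}
```

On a CPU-only server this call is noticeably slower, which is what the note in the diff is about.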
@@ -30,17 +30,17 @@ public class Main {
 
 You will get a response similar to:
 
-```json
+```javascript
 [
     0.5670403838157654,
     0.009260174818336964,
     0.23178744316101074,
     -0.2916173040866852,
     -0.8924556970596313,
     0.8785552978515625,
     -0.34576427936553955,
     0.5742510557174683,
     -0.04222835972905159,
     -0.137906014919281
 ]
 ```
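The numbers in this hunk are the leading entries of an embedding vector. As a point of reference only, here is a minimal sketch of requesting such a vector straight from Ollama's embeddings endpoint with Java's HttpClient; it assumes a local server on the default port 11434, and the model name llama2 is purely illustrative.

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class EmbeddingsSketch {
    public static void main(String[] args) throws Exception {
        // Any pulled model that supports embeddings can be used; "llama2" is an assumption.
        String body = """
                {"model": "llama2", "prompt": "Here is an article about llamas..."}""";

        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:11434/api/embeddings"))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(body))
                .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());

        // The reply is a JSON object whose "embedding" field is an array like the one above.
        System.out.println(response.body());
    }
}
```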
@@ -2,10 +2,38 @@
 sidebar_position: 1
 ---
 
-# Intro
+# Introduction
 
 Let's get started with **Ollama4j**.
 
+## 🦙 What is Ollama?
+
+[Ollama](https://ollama.ai/) is an advanced AI tool that allows users to easily set up and run large language models
+locally (in CPU and GPU
+modes). With Ollama, users can leverage powerful language models such as Llama 2 and even customize and create their own
+models.
+
+## 👨💻 Why Ollama4j?
+
+Ollama4j was built for the simple purpose of integrating Ollama with Java applications.
+
+```mermaid
+flowchart LR
+    o4j[Ollama4j]
+    o[Ollama Server]
+    o4j -->|Communicates with| o;
+    m[Models]
+    p[Your Java Project]
+    subgraph Your Java Environment
+        direction TB
+        p -->|Uses| o4j
+    end
+    subgraph Ollama Setup
+        direction TB
+        o -->|Manages| m
+    end
+```
+
 ## Getting Started
 
 ### What you'll need
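To make the flowchart added above concrete, here is a minimal sketch of the "Your Java Project uses Ollama4j, which communicates with the Ollama Server" path. The import path follows the io.github.amithkoujalgi coordinates seen in the Makefile above, and the OllamaAPI/ping() names are taken from the library's docs of this era; both may differ in later releases. The host URL assumes a local server on the default port.

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;

public class QuickCheck {
    public static void main(String[] args) throws Exception {
        // Your Java Project --uses--> Ollama4j
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");

        // Ollama4j --communicates with--> Ollama Server
        boolean reachable = ollamaAPI.ping();
        System.out.println("Ollama server reachable: " + reachable);
    }
}
```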
@@ -131,8 +131,13 @@ const config = {
     prism: {
       theme: prismThemes.github,
       darkTheme: prismThemes.dracula,
+      additionalLanguages: ['java'],
     },
   }),
+  markdown: {
+    mermaid: true,
+  },
+  themes: ['@docusaurus/theme-mermaid']
 };
 
 export default config;
1136  docs/package-lock.json (generated)
File diff suppressed because it is too large
@@ -16,6 +16,7 @@
   "dependencies": {
     "@docusaurus/core": "3.0.1",
     "@docusaurus/preset-classic": "3.0.1",
+    "@docusaurus/theme-mermaid": "^3.0.1",
     "@mdx-js/react": "^3.0.0",
     "clsx": "^2.0.0",
     "prism-react-renderer": "^2.3.0",