diff --git a/Makefile b/Makefile
index 2753d0e..393d5a9 100644
--- a/Makefile
+++ b/Makefile
@@ -30,10 +30,10 @@ list-releases:
 	--silent | jq -r '.components[].version'
 
 docs-build:
-	npm i --prefix docs && npm run build --prefix docs
+	cd ./docs && npm install && npm run build
 
 docs-serve:
-	npm i --prefix docs && npm run start --prefix docs
+	cd ./docs && npm install && npm run start
 
 start-cpu:
 	docker run -it -v ~/ollama:/root/.ollama -p 11434:11434 ollama/ollama
diff --git a/docs/docs/apis-extras/basic-auth.md b/docs/docs/apis-extras/basic-auth.md
index 15f681c..1e96177 100644
--- a/docs/docs/apis-extras/basic-auth.md
+++ b/docs/docs/apis-extras/basic-auth.md
@@ -1,8 +1,8 @@
 ---
-sidebar_position: 2
+sidebar_position: 3
 ---
 
-# Set Basic Authentication
+# Basic Auth
 
 This API lets you set the basic authentication for the Ollama client. This would help in scenarios where Ollama
 server would be setup behind a gateway/reverse proxy with basic auth.
diff --git a/docs/docs/apis-extras/bearer-auth.md b/docs/docs/apis-extras/bearer-auth.md
index 1ae3e80..cdd4b3a 100644
--- a/docs/docs/apis-extras/bearer-auth.md
+++ b/docs/docs/apis-extras/bearer-auth.md
@@ -1,8 +1,8 @@
 ---
-sidebar_position: 2
+sidebar_position: 4
 ---
 
-# Set Bearer Authentication
+# Bearer Auth
 
 This API lets you set the bearer authentication for the Ollama client. This would help in scenarios where Ollama
 server would be setup behind a gateway/reverse proxy with bearer auth.
diff --git a/docs/docs/apis-extras/logging.md b/docs/docs/apis-extras/logging.md
new file mode 100644
index 0000000..d73ba10
--- /dev/null
+++ b/docs/docs/apis-extras/logging.md
@@ -0,0 +1,26 @@
+---
+sidebar_position: 7
+---
+
+# Logging
+
+### Using with SLF4J and Logback
+
+Add a `logback.xml` file to your `src/main/resources` folder with the following content:
+
+```xml
+<?xml version="1.0" encoding="UTF-8" ?>
+<!DOCTYPE logback>
+<configuration>
+    <appender name="Console" class="ch.qos.logback.core.ConsoleAppender">
+        <layout class="ch.qos.logback.classic.PatternLayout">
+            <Pattern>
+                %d{yyyy-MM-dd HH:mm:ss} %-5level %logger{36} - %msg%n
+            </Pattern>
+        </layout>
+    </appender>
+    <root level="INFO">
+        <appender-ref ref="Console"/>
+    </root>
+</configuration>
+```
\ No newline at end of file
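The `logback.xml` added above is a standard Logback console configuration. As a minimal sketch of what log output would look like once it is on the classpath — assuming only the plain SLF4J API that the new logging page targets; the class name and messages below are placeholders, not part of this patch:

```java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingExample {

    // The logger name is what the %logger{36} conversion word in the pattern prints
    private static final Logger logger = LoggerFactory.getLogger(LoggingExample.class);

    public static void main(String[] args) {
        // With a root level of INFO (as in the config above), this is printed, e.g.:
        // 2024-01-01 12:00:00 INFO  LoggingExample - starting Ollama client
        logger.info("starting Ollama client");

        // DEBUG is below the INFO threshold, so this line is suppressed
        logger.debug("request/response details");
    }
}
```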
diff --git a/docs/docs/apis-extras/ping.md b/docs/docs/apis-extras/ping.md
index b52fff0..256c26b 100644
--- a/docs/docs/apis-extras/ping.md
+++ b/docs/docs/apis-extras/ping.md
@@ -1,5 +1,5 @@
 ---
-sidebar_position: 3
+sidebar_position: 5
 ---
 
 # Ping
diff --git a/docs/docs/apis-generate/prompt-builder.md b/docs/docs/apis-extras/prompt-builder.md
similarity index 98%
rename from docs/docs/apis-generate/prompt-builder.md
rename to docs/docs/apis-extras/prompt-builder.md
index dfbd6a8..3240591 100644
--- a/docs/docs/apis-generate/prompt-builder.md
+++ b/docs/docs/apis-extras/prompt-builder.md
@@ -1,5 +1,5 @@
 ---
-sidebar_position: 10
+sidebar_position: 2
 ---
 
 # Prompt Builder
@@ -51,6 +51,7 @@ public class Main {
 You will get a response similar to:
 
+:::tip[LLM Response]
 ```go
 package main
 
 import (
 	"fmt"
 	"os"
 )
 
 func readFile(fileName string) {
 	f, err := os.Open(fileName)
 	if err != nil {
 		fmt.Println(err)
 		return
 	}
 	defer f.Close()
 
 	fmt.Println(f.String())
 	}
 }
-```
\ No newline at end of file
+```
+:::
\ No newline at end of file
diff --git a/docs/docs/apis-extras/ps.md b/docs/docs/apis-extras/ps.md
index 4f37e04..faea1c3 100644
--- a/docs/docs/apis-extras/ps.md
+++ b/docs/docs/apis-extras/ps.md
@@ -1,5 +1,5 @@
 ---
-sidebar_position: 4
+sidebar_position: 5
 ---
 
 # PS
diff --git a/docs/docs/apis-extras/request-timeout.md b/docs/docs/apis-extras/timeouts.md
similarity index 91%
rename from docs/docs/apis-extras/request-timeout.md
rename to docs/docs/apis-extras/timeouts.md
index f22971a..a9e6b62 100644
--- a/docs/docs/apis-extras/request-timeout.md
+++ b/docs/docs/apis-extras/timeouts.md
@@ -2,7 +2,9 @@
 sidebar_position: 2
 ---
 
-# Set Request Timeout
+# Timeouts
+
+## Set Request Timeout
 
 This API lets you set the request timeout for the Ollama client.
 
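The Basic Auth, Bearer Auth and Timeouts pages above all configure the same client instance. A rough sketch of how those settings combine on one `OllamaAPI` object — the package, constructor and setter names here are assumptions inferred from the page descriptions, not confirmed by this diff:

```java
// Sketch only: io.github.ollama4j.OllamaAPI and the setter names below are
// assumptions based on the docs pages above, not taken from this patch.
import io.github.ollama4j.OllamaAPI;

public class ClientSetup {
    public static void main(String[] args) {
        // Default local Ollama endpoint, as exposed by the docker targets in the Makefile
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");

        // Request timeout in seconds, useful when large models respond slowly
        ollamaAPI.setRequestTimeoutSeconds(120);

        // Basic auth for an Ollama server sitting behind a gateway/reverse proxy
        ollamaAPI.setBasicAuth("username", "password");

        // Alternatively, bearer auth if the proxy expects a token instead
        // ollamaAPI.setBearerAuth("api-token");
    }
}
```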
diff --git a/docs/docs/apis-generate/generate-async.md b/docs/docs/apis-generate/generate-async.md
index a2eb5af..fe659ce 100644
--- a/docs/docs/apis-generate/generate-async.md
+++ b/docs/docs/apis-generate/generate-async.md
@@ -1,5 +1,5 @@
 ---
-sidebar_position: 2
+sidebar_position: 6
 ---
 
 import CodeEmbed from '@site/src/components/CodeEmbed';
diff --git a/docs/docs/apis-generate/generate-embeddings.md b/docs/docs/apis-generate/generate-embeddings.md
index f716feb..27894a5 100644
--- a/docs/docs/apis-generate/generate-embeddings.md
+++ b/docs/docs/apis-generate/generate-embeddings.md
@@ -1,5 +1,5 @@
 ---
-sidebar_position: 5
+sidebar_position: 1
 ---
 
 import CodeEmbed from '@site/src/components/CodeEmbed';
diff --git a/docs/docs/apis-generate/generate-thinking.md b/docs/docs/apis-generate/generate-thinking.md
index 93a8dea..d38634d 100644
--- a/docs/docs/apis-generate/generate-thinking.md
+++ b/docs/docs/apis-generate/generate-thinking.md
@@ -1,5 +1,5 @@
 ---
-sidebar_position: 2
+sidebar_position: 3
 ---
 
 import CodeEmbed from '@site/src/components/CodeEmbed';
diff --git a/docs/docs/apis-generate/generate-with-image-urls.md b/docs/docs/apis-generate/generate-with-image-urls.md
deleted file mode 100644
index cc89e5d..0000000
--- a/docs/docs/apis-generate/generate-with-image-urls.md
+++ /dev/null
@@ -1,33 +0,0 @@
----
-sidebar_position: 4
----
-
-import CodeEmbed from '@site/src/components/CodeEmbed';
-
-# Generate with Image URLs
-
-This API lets you ask questions along with the image files to the LLMs.
-This API corresponds to
-the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
-
-:::note
-
-Executing this on Ollama server running in CPU-mode will take longer to generate response. Hence, GPU-mode is
-recommended.
-
-:::
-
-## Ask (Sync)
-
-Passing the link of this image the following code:
-
-![Img](https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg)
-
-
-
-You will get a response similar to:
-
-::::tip[LLM Response]
-This image features a white boat with brown cushions, where a dog is sitting on the back of the boat. The dog seems to
-be enjoying its time outdoors, perhaps on a lake.
-::::
\ No newline at end of file
diff --git a/docs/docs/apis-generate/generate-with-image-files.md b/docs/docs/apis-generate/generate-with-images.md
similarity index 52%
rename from docs/docs/apis-generate/generate-with-image-files.md
rename to docs/docs/apis-generate/generate-with-images.md
index e17888d..32f4e49 100644
--- a/docs/docs/apis-generate/generate-with-image-files.md
+++ b/docs/docs/apis-generate/generate-with-images.md
@@ -1,5 +1,5 @@
 ---
-sidebar_position: 3
+sidebar_position: 4
 ---
 
 import CodeEmbed from '@site/src/components/CodeEmbed';
@@ -27,6 +27,34 @@ If you have this image downloaded and you pass the path to the downloaded image
 
 You will get a response similar to:
 
+::::tip[LLM Response]
+This image features a white boat with brown cushions, where a dog is sitting on the back of the boat. The dog seems to
+be enjoying its time outdoors, perhaps on a lake.
+::::
+
+# Generate with Image URLs
+
+This API lets you ask questions along with the image files to the LLMs.
+This API corresponds to
+the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
+
+:::note
+
+Executing this on Ollama server running in CPU-mode will take longer to generate response. Hence, GPU-mode is
+recommended.
+
+:::
+
+## Ask (Sync)
+
+Passing the link of this image the following code:
+
+![Img](https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg)
+
+
+
+You will get a response similar to:
+
 ::::tip[LLM Response]
 This image features a white boat with brown cushions, where a dog is sitting on the back of the boat. The dog seems to
 be enjoying its time outdoors, perhaps on a lake.
diff --git a/docs/docs/apis-generate/generate-with-tools.md b/docs/docs/apis-generate/generate-with-tools.md
index d25a5fc..3577c09 100644
--- a/docs/docs/apis-generate/generate-with-tools.md
+++ b/docs/docs/apis-generate/generate-with-tools.md
@@ -1,5 +1,5 @@
 ---
-sidebar_position: 6
+sidebar_position: 5
 ---
 
 import CodeEmbed from '@site/src/components/CodeEmbed';
diff --git a/docs/docs/apis-generate/generate.md b/docs/docs/apis-generate/generate.md
index 553a014..a4b37dc 100644
--- a/docs/docs/apis-generate/generate.md
+++ b/docs/docs/apis-generate/generate.md
@@ -1,11 +1,11 @@
 ---
-sidebar_position: 1
+sidebar_position: 2
 ---
 
 import CodeEmbed from '@site/src/components/CodeEmbed';
 import TypewriterTextarea from '@site/src/components/TypewriterTextarea';
 
-# Generate (Sync)
+# Generate
 
 This API lets you ask questions to the LLMs in a synchronous way.
 This API corresponds to
diff --git a/docs/docs/apis-model-management/_category_.json b/docs/docs/apis-model-management/_category_.json
index 53539cf..48f345c 100644
--- a/docs/docs/apis-model-management/_category_.json
+++ b/docs/docs/apis-model-management/_category_.json
@@ -1,5 +1,5 @@
 {
-  "label": "APIs - Model Management",
+  "label": "APIs - Manage Models",
   "position": 2,
   "link": {
     "type": "generated-index",