Added CodeEmbed component to embed code snippets in markdown docs

This commit is contained in:
amithkoujalgi 2025-04-18 12:15:49 +05:30
parent 2b036c8a62
commit 9c181486a5
No known key found for this signature in database
GPG Key ID: E29A37746AF94B70
13 changed files with 233 additions and 221 deletions

View File

@ -2,6 +2,8 @@
sidebar_position: 7
---
import CodeEmbed from '@site/src/components/CodeEmbed';
# Chat
This API lets you create a conversation with LLMs. Using this API enables you to ask questions to the model including
@ -273,10 +275,4 @@ You will get a response similar to:
> or a similar breed. Without more details like ear shape and tail length, it's not possible to identify the exact breed
> confidently.
<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/ChatExample.java" />
[//]: # (Generated using: https://emgithub.com/)
<iframe style={{ width: '100%', height: '919px', border: 'none' }} allow="clipboard-write" src="https://emgithub.com/iframe.html?target=https%3A%2F%2Fgithub.com%2Follama4j%2Follama4j-examples%2Fblob%2Fmain%2Fsrc%2Fmain%2Fjava%2Fio%2Fgithub%2Follama4j%2Fexamples%2FChatExample.java&style=default&type=code&showBorder=on&showLineNumbers=on&showFileMeta=on&showFullPath=on&showCopy=on" />
<a href="https://github.com/ollama4j/ollama4j-examples/blob/main/src/main/java/io/github/ollama4j/examples/ChatExample.java" target="_blank">
View ChatExample.java on GitHub
</a>

View File

@ -2,6 +2,8 @@
sidebar_position: 1
---
import CodeEmbed from '@site/src/components/CodeEmbed';
# Generate - Sync
This API lets you ask questions to the LLMs in a synchronous way.
@ -15,77 +17,32 @@ to [this](/apis-extras/options-builder).
## Try asking a question about the model
<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/Generate.java" />

```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;

public class Main {

    public static void main(String[] args) throws Exception {
        String host = "http://localhost:11434/";
        OllamaAPI ollamaAPI = new OllamaAPI(host);

        OllamaResult result =
                ollamaAPI.generate(OllamaModelType.LLAMA2, "Who are you?", new OptionsBuilder().build());

        System.out.println(result.getResponse());
    }
}
```
You will get a response similar to:
> I am LLaMA, an AI assistant developed by Meta AI that can understand and respond to human input in a conversational
> manner. I am trained on a massive dataset of text from the internet and can generate human-like responses to a wide
> range of topics and questions. I can be used to create chatbots, virtual assistants, and other applications that
> require natural language understanding and generation capabilities.

> I am a large language model created by Alibaba Cloud. My purpose is to assist users in generating text, answering
> questions, and completing tasks. I aim to be user-friendly and easy to understand for everyone who interacts with me.
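
The `OptionsBuilder` used above can also carry generation parameters. A small illustrative variation follows; the `setTemperature` setter is assumed from the options-builder page linked earlier:

```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.utils.OptionsBuilder;

public class GenerateWithOptions {

    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");

        // Lower temperature for a more deterministic answer (setter name assumed from the OptionsBuilder docs).
        OllamaResult result = ollamaAPI.generate("llama2", "Who are you?",
                new OptionsBuilder().setTemperature(0.2f).build());

        System.out.println(result.getResponse());
    }
}
```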
## Try asking a question, receiving the answer streamed
<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/GenerateStreamingWithTokenConcatenation.java" />

```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.ollama4j.utils.OptionsBuilder;

public class Main {

    public static void main(String[] args) throws Exception {
        String host = "http://localhost:11434/";
        OllamaAPI ollamaAPI = new OllamaAPI(host);

        // Define a stream handler (Consumer<String>).
        OllamaStreamHandler streamHandler = (s) -> {
            System.out.println(s);
        };

        // Should be called from a separate thread to get a non-blocking streaming effect.
        OllamaResult result = ollamaAPI.generate("llama2",
                "What is the capital of France? And what's France's connection with Mona Lisa?",
                new OptionsBuilder().build(), streamHandler);

        System.out.println("Full response: " + result.getResponse());
    }
}
```
You will get a response similar to:
> The
>
> The capital
>
> The capital of
>
> The capital of France
>
> The capital of France is
>
> The capital of France is Paris
>
> The capital of France is Paris.
> Full response: The capital of France is Paris.
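
If you only need the final text rather than printing every partial chunk, the same streaming call shown above can be driven by a handler that keeps just the latest snapshot; a minimal illustrative sketch:

```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.utils.OptionsBuilder;

public class StreamKeepLatest {

    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");

        // Each callback receives the response text concatenated so far; keep only the latest snapshot.
        StringBuilder latest = new StringBuilder();
        OllamaStreamHandler streamHandler = (s) -> {
            latest.setLength(0);
            latest.append(s);
        };

        OllamaResult result = ollamaAPI.generate("llama2",
                "What is the capital of France?",
                new OptionsBuilder().build(), streamHandler);

        System.out.println("From handler: " + latest);
        System.out.println("From result:  " + result.getResponse());
    }
}
```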
## Try asking a question from general topics
@ -161,7 +118,6 @@ public class Main {
```
_Note: Here I've used
a [sample prompt](https://github.com/ollama4j/ollama4j/blob/main/src/main/resources/sample-db-prompt-template.txt)
containing a database schema from within this library for demonstration purposes._
@ -175,7 +131,6 @@ FROM sales
GROUP BY customers.name;
```
## Generate structured output
### With response as a `Map`

View File

@ -2,28 +2,27 @@
sidebar_position: 5
---
import CodeEmbed from '@site/src/components/CodeEmbed';
# Create Model
This API lets you create a custom model on the Ollama server.
### Create a custom model from an existing model in the Ollama server
```java title="CreateModel.java" <CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/CreateModel.java" />
import io.github.ollama4j.OllamaAPI;
public class CreateModel { You would see these logs while the custom model is being created:
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
ollamaAPI.createModel(CustomModelRequest.builder().model("mario").from("llama3.2:latest").system("You are Mario from Super Mario Bros.").build());
}
}
``` ```
{"status":"using existing layer sha256:fad2a06e4cc705c2fa8bec5477ddb00dc0c859ac184c34dcc5586663774161ca"}
{"status":"using existing layer sha256:41c2cf8c272f6fb0080a97cd9d9bd7d4604072b80a0b10e7d65ca26ef5000c0c"}
{"status":"using existing layer sha256:1da0581fd4ce92dcf5a66b1da737cf215d8dcf25aa1b98b44443aaf7173155f5"}
{"status":"creating new layer sha256:941b69ca7dc2a85c053c38d9e8029c9df6224e545060954fa97587f87c044a64"}
{"status":"using existing layer sha256:f02dd72bb2423204352eabc5637b44d79d17f109fdb510a7c51455892aa2d216"}
{"status":"writing manifest"}
{"status":"success"}
```
Once created, you can see it when you use [list models](./list-models) API.
[Read more](https://github.com/ollama/ollama/blob/main/docs/api.md#create-a-model) about custom model creation and the parameters available for model creation.
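
To verify the result programmatically, you could list the local models and check for the new name, reusing the `listModels()` call shown on the list-models page (illustrative sketch):

```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.Model;

import java.util.List;

public class VerifyCustomModel {

    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");

        // List local models and check whether the newly created "mario" model is present.
        List<Model> models = ollamaAPI.listModels();
        boolean found = models.stream().anyMatch(m -> m.getName().startsWith("mario"));
        System.out.println("mario model present: " + found);
    }
}
```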

View File

@ -2,27 +2,12 @@
sidebar_position: 6
---
import CodeEmbed from '@site/src/components/CodeEmbed';
# Delete Model
This API lets you delete a model from the Ollama server.
```java title="DeleteModel.java" <CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/DeleteModel.java" />
import io.github.ollama4j.OllamaAPI;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
ollamaAPI.setVerbose(false);
ollamaAPI.deleteModel("mycustommodel", true);
}
}
```
Once deleted, you can verify it using [list models](./list-models) API.

File diff suppressed because one or more lines are too long

View File

@ -30,27 +30,9 @@ The following is the sample output:
This API fetches the tags associated with a specific model from the Ollama library.
```java title="GetLibraryModelTags.java" <CodeEmbed
import io.github.ollama4j.OllamaAPI; src='https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/GetLibraryModelTags.java'>
import io.github.ollama4j.models.response.LibraryModel; </CodeEmbed>
import io.github.ollama4j.models.response.LibraryModelDetail;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
List<LibraryModel> libraryModels = ollamaAPI.listModelsFromLibrary();
LibraryModelDetail libraryModelDetail = ollamaAPI.getLibraryModelDetails(libraryModels.get(0));
System.out.println(libraryModelDetail);
}
}
```
The following is the sample output:
@ -69,24 +51,9 @@ LibraryModelDetail(
This API finds a specific model using model `name` and `tag` from the Ollama library.
```java title="FindLibraryModel.java" <CodeEmbed
import io.github.ollama4j.OllamaAPI; src='https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/FindLibraryModel.java'>
import io.github.ollama4j.models.response.LibraryModelTag; </CodeEmbed>
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
LibraryModelTag libraryModelTag = ollamaAPI.findModelTagFromLibrary("qwen2.5", "7b");
System.out.println(libraryModelTag);
}
}
```
The following is the sample output:
@ -98,21 +65,6 @@ LibraryModelTag(name=qwen2.5, tag=7b, size=4.7GB, lastUpdated=7 weeks ago)
You can use `LibraryModelTag` to pull models into the Ollama server.
```java title="PullLibraryModelTags.java" <CodeEmbed
import io.github.ollama4j.OllamaAPI; src='https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/PullLibraryModelTags.java'>
import io.github.ollama4j.models.response.LibraryModelTag; </CodeEmbed>
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
LibraryModelTag libraryModelTag = ollamaAPI.findModelTagFromLibrary("qwen2.5", "7b");
ollamaAPI.pullModel(libraryModelTag);
}
}
```

View File

@ -2,34 +2,23 @@
sidebar_position: 2
---
import CodeEmbed from '@site/src/components/CodeEmbed';
# List Local Models
This API lets you list downloaded/available models on the Ollama server.
```java title="ListModels.java" <CodeEmbed
import io.github.ollama4j.OllamaAPI; src='https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/ListLocalModels.java'>
import io.github.ollama4j.models.response.Model; </CodeEmbed>
import java.util.List;
public class ListModels {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
List<Model> models = ollamaAPI.listModels();
models.forEach(model -> System.out.println(model.getName()));
}
}
```
If you have any models already downloaded on the Ollama server, they would be listed as follows:
```bash
llama2:latest
llama3.2:1b
qwen2:0.5b
qwen:0.5b
sqlcoder:latest
```

View File

@ -2,26 +2,15 @@
sidebar_position: 3
---
import CodeEmbed from '@site/src/components/CodeEmbed';
# Pull Model
This API lets you pull a model on the Ollama server.
```java title="PullModel.java" <CodeEmbed
import io.github.ollama4j.OllamaAPI; src='https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/PullModel.java'>
import io.github.ollama4j.types.OllamaModelType; </CodeEmbed>
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
ollamaAPI.pullModel(OllamaModelType.LLAMA2);
}
}
```
Once downloaded, you can see them when you use [list models](./list-models) API.
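
Since the `OllamaModelType` constants are plain model-name strings, `pullModel` can also be given a specific model name and tag directly; an illustrative sketch:

```java
import io.github.ollama4j.OllamaAPI;

public class PullModelByName {

    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");

        // Any model name/tag from the Ollama library works here; this one is just an example.
        ollamaAPI.pullModel("llama3.2:1b");
    }
}
```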

docs/package-lock.json generated
View File

@ -13,11 +13,14 @@
"@docusaurus/plugin-google-gtag": "^3.4.0", "@docusaurus/plugin-google-gtag": "^3.4.0",
"@docusaurus/preset-classic": "^3.4.0", "@docusaurus/preset-classic": "^3.4.0",
"@docusaurus/theme-mermaid": "^3.4.0", "@docusaurus/theme-mermaid": "^3.4.0",
"@iconify/react": "^5.2.1",
"@mdx-js/react": "^3.0.0", "@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0", "clsx": "^2.0.0",
"font-awesome": "^4.7.0",
"prism-react-renderer": "^2.3.0", "prism-react-renderer": "^2.3.0",
"react": "^18.0.0", "react": "^18.0.0",
"react-dom": "^18.0.0" "react-dom": "^18.0.0",
"react-icons": "^5.5.0"
}, },
"devDependencies": { "devDependencies": {
"@docusaurus/module-type-aliases": "^3.4.0", "@docusaurus/module-type-aliases": "^3.4.0",
@ -3066,6 +3069,27 @@
"@hapi/hoek": "^9.0.0" "@hapi/hoek": "^9.0.0"
} }
}, },
"node_modules/@iconify/react": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/@iconify/react/-/react-5.2.1.tgz",
"integrity": "sha512-37GDR3fYDZmnmUn9RagyaX+zca24jfVOMY8E1IXTqJuE8pxNtN51KWPQe3VODOWvuUurq7q9uUu3CFrpqj5Iqg==",
"license": "MIT",
"dependencies": {
"@iconify/types": "^2.0.0"
},
"funding": {
"url": "https://github.com/sponsors/cyberalien"
},
"peerDependencies": {
"react": ">=16"
}
},
"node_modules/@iconify/types": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/@iconify/types/-/types-2.0.0.tgz",
"integrity": "sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg==",
"license": "MIT"
},
"node_modules/@jest/schemas": { "node_modules/@jest/schemas": {
"version": "29.6.3", "version": "29.6.3",
"resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz",
@ -7266,6 +7290,15 @@
}
}
},
"node_modules/font-awesome": {
"version": "4.7.0",
"resolved": "https://registry.npmjs.org/font-awesome/-/font-awesome-4.7.0.tgz",
"integrity": "sha512-U6kGnykA/6bFmg1M/oT9EkFeIYv7JlX3bozwQJWiiLz6L0w3F5vBVPxHlwyX/vtNq1ckcpRKOB9f2Qal/VtFpg==",
"license": "(OFL-1.1 AND MIT)",
"engines": {
"node": ">=0.10.3"
}
},
"node_modules/fork-ts-checker-webpack-plugin": { "node_modules/fork-ts-checker-webpack-plugin": {
"version": "6.5.3", "version": "6.5.3",
"resolved": "https://registry.npmjs.org/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.5.3.tgz", "resolved": "https://registry.npmjs.org/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.5.3.tgz",
@ -13384,6 +13417,15 @@
"react-dom": "^16.6.0 || ^17.0.0 || ^18.0.0" "react-dom": "^16.6.0 || ^17.0.0 || ^18.0.0"
} }
}, },
"node_modules/react-icons": {
"version": "5.5.0",
"resolved": "https://registry.npmjs.org/react-icons/-/react-icons-5.5.0.tgz",
"integrity": "sha512-MEFcXdkP3dLo8uumGI5xN3lDFNsRtrjbOEKDLD7yv76v4wpnEq2Lt2qeHaQOr34I/wPN3s3+N08WkQ+CW37Xiw==",
"license": "MIT",
"peerDependencies": {
"react": "*"
}
},
"node_modules/react-is": { "node_modules/react-is": {
"version": "16.13.1", "version": "16.13.1",
"resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz",

View File

@ -19,11 +19,14 @@
"@docusaurus/plugin-google-gtag": "^3.4.0", "@docusaurus/plugin-google-gtag": "^3.4.0",
"@docusaurus/preset-classic": "^3.4.0", "@docusaurus/preset-classic": "^3.4.0",
"@docusaurus/theme-mermaid": "^3.4.0", "@docusaurus/theme-mermaid": "^3.4.0",
"@iconify/react": "^5.2.1",
"@mdx-js/react": "^3.0.0", "@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0", "clsx": "^2.0.0",
"font-awesome": "^4.7.0",
"prism-react-renderer": "^2.3.0", "prism-react-renderer": "^2.3.0",
"react": "^18.0.0", "react": "^18.0.0",
"react-dom": "^18.0.0" "react-dom": "^18.0.0",
"react-icons": "^5.5.0"
}, },
"devDependencies": { "devDependencies": {
"@docusaurus/module-type-aliases": "^3.4.0", "@docusaurus/module-type-aliases": "^3.4.0",

View File

@ -1,5 +1,82 @@
// import React, { useState, useEffect } from 'react';
// import CodeBlock from '@theme/CodeBlock';
// import Icon from '@site/src/components/Icon';
// const CodeEmbed = ({ src }) => {
// const [code, setCode] = useState('');
// const [loading, setLoading] = useState(true);
// const [error, setError] = useState(null);
// useEffect(() => {
// let isMounted = true;
// const fetchCodeFromUrl = async (url) => {
// if (!isMounted) return;
// setLoading(true);
// setError(null);
// try {
// const response = await fetch(url);
// if (!response.ok) {
// throw new Error(`HTTP error! status: ${response.status}`);
// }
// const data = await response.text();
// if (isMounted) {
// setCode(data);
// }
// } catch (err) {
// console.error('Failed to fetch code:', err);
// if (isMounted) {
// setError(err);
// setCode(`// Failed to load code from ${url}\n// ${err.message}`);
// }
// } finally {
// if (isMounted) {
// setLoading(false);
// }
// }
// };
// if (src) {
// fetchCodeFromUrl(src);
// }
// return () => {
// isMounted = false;
// };
// }, [src]);
// const githubUrl = src ? src.replace('https://raw.githubusercontent.com', 'https://github.com').replace('/refs/heads/', '/blob/') : null;
// const fileName = src ? src.substring(src.lastIndexOf('/') + 1) : null;
// return (
// loading ? (
// <div>Loading code...</div>
// ) : error ? (
// <div>Error: {error.message}</div>
// ) : (
// <div style={{ backgroundColor: 'transparent', padding: '0px', borderRadius: '5px' }}>
// <div style={{ textAlign: 'right' }}>
// {githubUrl && (
// <a href={githubUrl} target="_blank" rel="noopener noreferrer" style={{ paddingRight: '15px', color: 'gray', fontSize: '0.8em', fontStyle: 'italic', display: 'inline-flex', alignItems: 'center' }}>
// View on GitHub
// <Icon icon="mdi:github" height="48" />
// </a>
// )}
// </div>
// <CodeBlock title={fileName} className="language-java">{code}</CodeBlock>
// </div>
// )
// );
// };
// export default CodeEmbed;
import React, { useState, useEffect } from 'react';
import CodeBlock from '@theme/CodeBlock';
import Icon from '@site/src/components/Icon';
const CodeEmbed = ({ src }) => {
  const [code, setCode] = useState('');
@ -49,21 +126,42 @@ const CodeEmbed = ({ src }) => {
  const githubUrl = src ? src.replace('https://raw.githubusercontent.com', 'https://github.com').replace('/refs/heads/', '/blob/') : null;
  const fileName = src ? src.substring(src.lastIndexOf('/') + 1) : null;
  const title = (
    <div style={{ display: 'flex', justifyContent: 'space-between', alignItems: 'center' }}>
      <a
        href={githubUrl}
        target="_blank"
        rel="noopener noreferrer"
        style={{
          color: 'gray',
          textDecoration: 'none',
        }}
        onMouseOver={e => {
          e.target.style.textDecoration = 'underline';
        }}
        onMouseOut={e => {
          e.target.style.textDecoration = 'none';
        }}
      >
        <span>{fileName}</span>
      </a>
      {githubUrl && (
        <a href={githubUrl} target="_blank" rel="noopener noreferrer" style={{ color: 'gray', fontSize: '0.9em', fontStyle: 'italic', display: 'inline-flex', alignItems: 'center' }}>
          View on GitHub
          <Icon icon="mdi:github" height="1em" />
        </a>
      )}
    </div>
  );
  return (
    loading ? (
      <div>Loading code...</div>
    ) : error ? (
      <div>Error: {error.message}</div>
    ) : (
      <div style={{ backgroundColor: '#f5f5f5', padding: '0px', borderRadius: '5px' }}>
        <div style={{ textAlign: 'right' }}>
          {githubUrl && (
            <a href={githubUrl} target="_blank" rel="noopener noreferrer" style={{ paddingRight: '15px', color: 'gray', fontSize: '0.8em', fontStyle: 'italic', display: 'inline-flex', alignItems: 'center' }}>
              View on GitHub
            </a>
          )}
        </div>
        <CodeBlock title={fileName} className="language-java">{code}</CodeBlock>
      <div style={{ backgroundColor: 'transparent', padding: '0px', borderRadius: '5px' }}>
        <CodeBlock title={title} className="language-java">{code}</CodeBlock>
      </div>
    )
  );

View File

@ -0,0 +1,9 @@
// @site/src/components/Icon.js
import React from 'react';
import { Icon as IconifyIcon } from '@iconify/react';
const IIcon = ({ icon, color, width = '24', height = '24' }) => (
<IconifyIcon icon={icon} color={color} width={width} height={height} />
);
export default IIcon;

View File

@ -4,6 +4,8 @@
 * work well for content-centric websites.
 */
@import 'font-awesome/css/font-awesome.min.css';
/* You can override the default Infima variables here. */
:root {
  --ifm-color-primary: #2e8555;