An attempt at getting image data back

2024-07-14 00:27:33 +02:00
parent e026bc93f7
commit 6452d2e774
1314 changed files with 218350 additions and 38 deletions


@@ -0,0 +1,90 @@
# SPDX-License-Identifier: CC-BY-SA-4.0
# Doxyfile 1.9.5
PROJECT_NAME = "libcamera"
PROJECT_NUMBER = "@VERSION@"
PROJECT_BRIEF = "Supporting cameras in Linux since 2019"
OUTPUT_DIRECTORY = "@OUTPUT_DIR@"
STRIP_FROM_PATH = "@TOP_SRCDIR@"
ALIASES = "context=\xrefitem context \"Thread Safety\" \"Thread Safety\"" \
"threadbound=\ref thread-bound \"thread-bound\"" \
"threadsafe=\ref thread-safe \"thread-safe\""
EXTENSION_MAPPING = h=C++
TOC_INCLUDE_HEADINGS = 0
CASE_SENSE_NAMES = YES
QUIET = YES
WARN_AS_ERROR = @WARN_AS_ERROR@
INPUT = "@TOP_SRCDIR@/include/libcamera" \
"@TOP_SRCDIR@/src/ipa/ipu3" \
"@TOP_SRCDIR@/src/ipa/libipa" \
"@TOP_SRCDIR@/src/libcamera" \
"@TOP_BUILDDIR@/include/libcamera" \
"@TOP_BUILDDIR@/src/libcamera"
FILE_PATTERNS = *.c \
*.cpp \
*.h
RECURSIVE = YES
EXCLUDE = @TOP_SRCDIR@/include/libcamera/base/span.h \
@TOP_SRCDIR@/include/libcamera/internal/device_enumerator_sysfs.h \
@TOP_SRCDIR@/include/libcamera/internal/device_enumerator_udev.h \
@TOP_SRCDIR@/include/libcamera/internal/ipc_pipe_unixsocket.h \
@TOP_SRCDIR@/src/libcamera/device_enumerator_sysfs.cpp \
@TOP_SRCDIR@/src/libcamera/device_enumerator_udev.cpp \
@TOP_SRCDIR@/src/libcamera/ipc_pipe_unixsocket.cpp \
@TOP_SRCDIR@/src/libcamera/pipeline/ \
@TOP_SRCDIR@/src/libcamera/tracepoints.cpp \
@TOP_BUILDDIR@/include/libcamera/internal/tracepoints.h \
@TOP_BUILDDIR@/include/libcamera/ipa/soft_ipa_interface.h \
@TOP_BUILDDIR@/src/libcamera/proxy/
EXCLUDE_PATTERNS = @TOP_BUILDDIR@/include/libcamera/ipa/*_serializer.h \
@TOP_BUILDDIR@/include/libcamera/ipa/*_proxy.h \
@TOP_BUILDDIR@/include/libcamera/ipa/ipu3_*.h \
@TOP_BUILDDIR@/include/libcamera/ipa/raspberrypi_*.h \
@TOP_BUILDDIR@/include/libcamera/ipa/rkisp1_*.h \
@TOP_BUILDDIR@/include/libcamera/ipa/vimc_*.h
EXCLUDE_SYMBOLS = libcamera::BoundMethodArgs \
libcamera::BoundMethodBase \
libcamera::BoundMethodFunctor \
libcamera::BoundMethodMember \
libcamera::BoundMethodPack \
libcamera::BoundMethodPackBase \
libcamera::BoundMethodStatic \
libcamera::CameraManager::Private \
libcamera::SignalBase \
libcamera::ipa::AlgorithmFactoryBase \
*::details \
std::*
EXCLUDE_SYMLINKS = YES
HTML_OUTPUT = api-html
GENERATE_LATEX = NO
MACRO_EXPANSION = YES
EXPAND_ONLY_PREDEF = YES
INCLUDE_PATH = "@TOP_SRCDIR@/include/libcamera"
INCLUDE_FILE_PATTERNS = *.h
IMAGE_PATH = "@TOP_SRCDIR@/Documentation/images"
PREDEFINED = __DOXYGEN__ \
__cplusplus \
__attribute__(x)= \
@PREDEFINED@
HAVE_DOT = YES


@@ -0,0 +1,8 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
.. _api:
API
===
:: Placeholder for Doxygen documentation

File diff suppressed because it is too large

(Image file added, 194 KiB)


@@ -0,0 +1,173 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
.. _camera-sensor-model:
.. todo: Move to Doxygen-generated documentation
The libcamera camera sensor model
=================================
libcamera defines an abstract camera sensor model in order to provide
a description of each of the processing steps that result in image data being
sent on the media bus and that form the image stream delivered to applications.
Applications should use the abstract camera sensor model defined here to
precisely control the operations of the camera sensor.
The libcamera camera sensor model targets image sensors producing frames in
RAW format, delivered through a MIPI CSI-2 compliant bus implementation.
The abstract sensor model maps libcamera components to the characteristics and
operations of an image sensor, and serves as a reference for modelling the
libcamera CameraSensor and SensorConfiguration classes and their operations.
In order to control the configuration of the camera sensor through the
SensorConfiguration class, applications should understand this model and map it
to the combination of image sensor and kernel driver in use.
The camera sensor model defined here is based on the *MIPI CCS specification*,
particularly on *Section 8.2 - Image readout* of *Chapter 8 - Video Timings*.
Glossary
--------
.. glossary::
Pixel array
The full grid of pixels, active and inactive ones
Pixel array active area
The portion(s) of the pixel array that contains valid and readable pixels;
corresponds to the libcamera properties::PixelArrayActiveAreas
Analog crop rectangle
The portion of the *pixel array active area* which is read out and passed
to further processing stages
Subsampling
Pixel processing techniques that reduce the image size by binning or by
skipping adjacent pixels
Digital crop
Crop of the sub-sampled image data before scaling
Frame output
The frame (image) as output on the media bus by the camera sensor
Camera sensor model
-------------------
The abstract sensor model is described in the following diagram.
.. figure:: sensor_model.svg
1. The sensor reads pixels from the *pixel array*. The pixels being read out are
selected by the *analog crop rectangle*.
2. The pixels can be subsampled to reduce the image size without affecting the
field of view. Two subsampling techniques can be used:
- Binning: combines adjacent pixels of the same colour by averaging or
summing their values, in the analog domain and/or the digital domain.
.. figure:: binning.svg
- Skipping: skips the read out of a number of adjacent pixels.
.. figure:: skipping.svg
3. The output of the optional sub-sampling stage is then cropped after the
analogue pixel values have been converted to the digital domain.
4. The resulting output frame is sent on the media bus by the sensor.
Camera Sensor configuration parameters
--------------------------------------
The libcamera camera sensor model defines parameters that allow users to
control:
1. The image format bit depth
2. The size and position of the *Analog crop rectangle*
3. The subsampling factors used to downscale the pixel array readout data to a
smaller frame size without reducing the image *field of view*. Two
configuration parameters are made available to control the downscaling
factor:
- binning
A vertical and horizontal binning factor can be specified; the image
will be downscaled in its vertical and horizontal sizes by the specified
factor.
.. code-block:: c
:caption: Definition: The horizontal and vertical binning factors
horizontal_binning = xBin;
vertical_binning = yBin;
- skipping
Skipping reduces the image resolution by skipping the read-out of a number
of adjacent pixels. The skipping factor is specified by the 'increment'
number (number of pixels to 'skip') in the vertical and horizontal
directions and for even and odd rows and columns.
.. code-block:: c
:caption: Definition: The horizontal and vertical skipping factors
horizontal_skipping = (xOddInc + xEvenInc) / 2;
vertical_skipping = (yOddInc + yEvenInc) / 2;
Different sensors perform the binning and skipping stages in different
orders. For the sake of computing the final output image size the order of
execution is not relevant. The overall down-scaling factor is obtained by
combining the binning and skipping factors.
.. code-block:: c
:caption: Definition: The total scaling factor (binning + sub-sampling)
total_horizontal_downscale = horizontal_binning * horizontal_skipping;
total_vertical_downscale = vertical_binning * vertical_skipping;
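As an illustration with invented values, consider 2x2 binning combined with
skipping configured with xOddInc = yOddInc = 3 and xEvenInc = yEvenInc = 1:

.. code-block:: c
   :caption: Example: total downscaling factors for a hypothetical configuration

   horizontal_binning = 2;              /* xBin = 2 */
   vertical_binning = 2;                /* yBin = 2 */
   horizontal_skipping = (3 + 1) / 2;   /* = 2 */
   vertical_skipping = (3 + 1) / 2;     /* = 2 */

   total_horizontal_downscale = 2 * 2;  /* = 4 */
   total_vertical_downscale = 2 * 2;    /* = 4 */

A 4096x3072 analog crop rectangle would thus produce a 1024x768 frame output.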
4. The output size is used to specify any additional cropping on the sub-sampled
frame.
5. The total line length and frame height (*visible* pixels + *blanking*) as
sent on the MIPI CSI-2 bus.
6. The pixel transmission rate on the MIPI CSI-2 bus.
The above parameters are combined to obtain the following high-level
configurations:
- **frame output size**
Obtained by applying a crop to the physical pixel array size in the analog
domain, followed by optional binning and sub-sampling (in any order),
followed by an optional crop step in the output digital domain.
- **frame rate**
The combination of the *total frame size*, the image format *bit depth* and
the *pixel rate* of the data sent on the MIPI CSI-2 bus makes it possible to
compute the image stream frame rate. The equation is the well-known:
.. code-block:: c
frame_duration = total_frame_size / pixel_rate;
frame_rate = 1 / frame_duration;
where the *pixel_rate* parameter is the result of the sensor's configuration
of the MIPI CSI-2 bus *(the following formula applies to MIPI CSI-2 when
used on MIPI D-PHY physical protocol layer only)*
.. code-block:: c
pixel_rate = csi_2_link_freq * 2 * nr_of_lanes / bits_per_sample;
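As a worked example with invented values, consider a hypothetical sensor
streaming on 4 data lanes with a 450 MHz CSI-2 link frequency, 10 bits per
sample, and a total frame size (visible pixels plus blanking) of 4096x3000:

.. code-block:: c
   :caption: Example: frame rate computation for a hypothetical sensor

   pixel_rate = 450000000 * 2 * 4 / 10;        /* = 360000000 pixels/s */
   total_frame_size = 4096 * 3000;             /* = 12288000 pixels */
   frame_duration = 12288000.0 / 360000000.0;  /* ~0.0341 s */
   frame_rate = 1.0 / frame_duration;          /* ~29.3 frames/s */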


@@ -0,0 +1,94 @@
.. SPDX-License-Identifier: CC-BY-4.0
.. _code-of-conduct:
Contributor Covenant Code of Conduct
====================================
Our Pledge
----------
In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to make participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, sex characteristics, gender identity and expression,
level of experience, education, socio-economic status, nationality, personal
appearance, race, religion, or sexual identity and orientation.
Our Standards
-------------
Examples of behavior that contributes to creating a positive environment
include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
Our Responsibilities
--------------------
Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
Scope
-----
This Code of Conduct applies within all project spaces, and it also applies when
an individual is representing the project or its community in public spaces.
Examples of representing a project or community include using an official
project e-mail address, posting via an official social media account, or acting
as an appointed representative at an online or offline event. Representation of
a project may be further defined and clarified by project maintainers.
Enforcement
-----------
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at conduct@libcamera.org, or directly to
any member of the code of conduct team:
* Kieran Bingham <kieran.bingham@ideasonboard.com>
* Laurent Pinchart <laurent.pinchart@ideasonboard.com>
All complaints will be reviewed and investigated and will result in a response
that is deemed necessary and appropriate to the circumstances. The project team
is obligated to maintain confidentiality with regard to the reporter of an
incident. Further details of specific enforcement policies may be posted
separately.
Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.
Attribution
-----------
This Code of Conduct is adapted from the `Contributor Covenant`_, version 1.4,
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
.. _Contributor Covenant: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see
https://www.contributor-covenant.org/faq


@@ -0,0 +1,429 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
.. _coding-style-guidelines:
Coding Style Guidelines
=======================
These coding guidelines are meant to ensure code quality. As a contributor
you are expected to follow them in all code submitted to the project. While
strict compliance is desired, exceptions are tolerated when justified with
good reasons. Please read the whole coding guidelines and use common sense
to decide when departing from them is appropriate.
libcamera is written in C++, a language that has seen many revisions and
offers an extensive set of features that are easy to abuse. These coding
guidelines establish the subset of C++ used by the project.
Coding Style
------------
Even if the programming language in use is different, the project embraces the
`Linux Kernel Coding Style`_ with a few exceptions and some C++ specificities.
.. _Linux Kernel Coding Style: https://www.kernel.org/doc/html/latest/process/coding-style.html
In particular, from the kernel style document, the following sections are adopted:
* 1 "Indentation"
* 2 "Breaking Long Lines" striving to fit code within 80 columns and
accepting up to 120 columns when necessary
* 3 "Placing Braces and Spaces"
* 3.1 "Spaces"
* 8 "Commenting" with the exception that in-function comments are not
always un-welcome.
While libcamera uses the kernel coding style for all typographic matters, the
project is a user space library, developed in a different programming language,
and the kernel guidelines fall short for this use case.
For this reason, rules and guidelines from the `Google C++ Style Guide`_ have
been adopted as well as most coding principles specified therein, with a
few exceptions and relaxed limitations on some subjects.
.. _Google C++ Style Guide: https://google.github.io/styleguide/cppguide.html
The following exceptions apply to the naming conventions specified in the
document:
* File names: libcamera uses the .cpp extensions for C++ source files and
the .h extension for header files
* Variables, function parameters, function names and class members use
camel case style, with the first letter in lower-case (as in 'camelCase'
and not 'CamelCase')
* Types (classes, structs, type aliases, and type template parameters) use
camel case, with the first letter in capital case (as in 'CamelCase' and
not 'camelCase')
* Enum members use 'CamelCase', while macros are in capital case with
underscores in between
* All formatting rules specified in the selected sections of the Linux kernel
Code Style for indentation, braces, spacing, etc.
* Headers are guarded by the use of '#pragma once'
Order of Includes
~~~~~~~~~~~~~~~~~
Headers shall be included at the beginning of .c, .cpp and .h files, right
after the file description comment block and, for .h files, the header guard
macro. For .cpp files, if the file implements an API declared in a header file,
that header file shall be included first in order to ensure it is
self-contained.
While the following list is extensive, it documents the expected behaviour
defined by the clang-format configuration, and tooling should assist with
ordering.
The headers shall be grouped and ordered as follows:
1. The header declaring the API being implemented (if any)
2. The C and C++ system and standard library headers
3. Linux kernel headers
4. The libcamera base private header if required
5. The libcamera base library headers
6. The libcamera public API headers
7. The libcamera IPA interfaces
8. The internal libcamera headers
9. Other libraries' headers, with one group per library
10. Local headers grouped by subdirectory
11. Any local headers
Groups of headers shall be separated by a single blank line. Headers within
each group shall be sorted alphabetically.
System and library headers shall be included with angle brackets. Project
headers shall be included with angle brackets for the libcamera public API
headers, and with double quotes for internal libcamera headers.
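As an illustration, the include section of a hypothetical source file
implementing a pipeline handler could be grouped as follows (the header names
are picked for the example):

::

    /* 1. The header declaring the API being implemented. */
    #include "libcamera/internal/pipeline_handler.h"

    /* 2. The C and C++ system and standard library headers. */
    #include <memory>
    #include <string.h>
    #include <vector>

    /* 3. Linux kernel headers. */
    #include <linux/media.h>

    /* 6. The libcamera public API headers, with angle brackets. */
    #include <libcamera/camera.h>
    #include <libcamera/stream.h>

    /* 8. The internal libcamera headers, with double quotes. */
    #include "libcamera/internal/device_enumerator.h"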
C++ Specific Rules
------------------
The code shall be implemented in C++17, with the following caveats:
* Type inference (auto and decltype) shall be used with caution, to avoid
drifting towards an untyped language.
* The explicit, override and final specifiers are to be used where applicable.
* Smart pointers, as well as shared pointers and weak pointers, shall not be
overused.
* Classes are encouraged to define move constructors and assignment operators
where applicable, and generally make use of the features offered by rvalue
references.
Object Ownership
~~~~~~~~~~~~~~~~
libcamera creates and destroys many objects at runtime, for both objects
internal to the library and objects exposed to the user. To guarantee proper
operation without use after free, double free or memory leaks, knowing who owns
each object at any time is crucial. The project has enacted a set of rules to
make object ownership tracking as explicit and fool-proof as possible.
In the context of this section, the terms object and instance are used
interchangeably and both refer to an instance of a class. The term reference
refers to both C++ references and C++ pointers in their capacity to refer to an
object. Passing a reference means offering a way to a callee to obtain a
reference to an object that the caller has a valid reference to. Borrowing a
reference means using a reference passed by a caller without ownership transfer
based on the assumption that the caller guarantees the validity of the
reference for the duration of the operation that borrows it.
1. Single Owner Objects
* By default an object has a single owner at any time.
* Storage of single owner objects varies depending on how the object
ownership will evolve through the lifetime of the object.
* Objects whose ownership needs to be transferred shall be stored as
std::unique_ptr<> as much as possible to emphasize the single ownership.
* Objects whose owner doesn't change may be embedded in other objects, or
stored as pointer or references. They may be stored as std::unique_ptr<>
for automatic deletion if desired.
* Ownership is transferred by passing the reference as a std::unique_ptr<>
and using std::move(). After ownership transfer the former owner has no
valid reference to the object anymore and shall not access it without first
obtaining a valid reference.
* Objects may be borrowed by passing an object reference from the owner to
the borrower, providing that
* the owner guarantees the validity of the reference for the whole duration
of the borrowing, and
* the borrower doesn't access the reference after the end of the borrowing.
When borrowing from caller to callee for the duration of a function call,
this implies that the callee shall not keep any stored reference after it
returns. These rules apply to the callee and all the functions it calls,
directly or indirectly.
When the object is stored in a std::unique_ptr<>, borrowing passes a
reference to the object, not to the std::unique_ptr<>, as
* a 'const &' when the object doesn't need to be modified and may not be
null.
* a pointer when the object may be modified or may be null. Unless
otherwise specified, pointers passed to functions are considered as
borrowed references valid for the duration of the function only.
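The single owner rules can be illustrated with a short sketch (the class and
function names are invented for the example):

::

    #include <memory>
    #include <utility>

    class Processor
    {
    };

    /* A pointer parameter is a borrowed reference, valid for the call only. */
    void process(Processor *processor)
    {
    }

    /* A std::unique_ptr<> parameter transfers ownership to the callee. */
    void adopt(std::unique_ptr<Processor> processor)
    {
    }

    void example()
    {
        std::unique_ptr<Processor> processor = std::make_unique<Processor>();

        process(processor.get());    /* Borrowed, ownership is retained. */
        adopt(std::move(processor)); /* Transferred, processor is now null. */
    }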
2. Shared Objects
* Objects that may have multiple owners at a given time are called shared
objects. They are reference-counted and live as long as any references to
the object exist.
* Shared objects are created with std::make_shared<> or
std::allocate_shared<> and stored in an std::shared_ptr<>.
* Ownership is shared by creating and passing copies of any valid
std::shared_ptr<>. Ownership is released by destroying the corresponding
std::shared_ptr<>.
* When passed to a function, std::shared_ptr<> are always passed by value,
never by reference. The caller can decide whether to transfer its ownership
of the std::shared_ptr<> with std::move() or retain it. The callee shall
use std::move() if it needs to store the shared pointer.
* Do not over-use std::move(), as it may prevent copy-elision. In particular
a function returning a std::shared_ptr<> value shall not use std::move() in
its return statements, and its callers shall not wrap the function call
with std::move().
* Borrowed references to shared objects are passed as references to the
objects themselves, not to the std::shared_ptr<>, with the same rules as
for single owner objects.
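A corresponding sketch for shared objects (the class names are invented for
the example):

::

    #include <memory>
    #include <utility>

    class Buffer
    {
    };

    class Consumer
    {
    public:
        /* std::shared_ptr<> is passed by value, never by reference. */
        void setBuffer(std::shared_ptr<Buffer> buffer)
        {
            /* The callee uses std::move() as it stores the shared pointer. */
            buffer_ = std::move(buffer);
        }

    private:
        std::shared_ptr<Buffer> buffer_;
    };

    std::shared_ptr<Buffer> makeBuffer()
    {
        std::shared_ptr<Buffer> buffer = std::make_shared<Buffer>();
        /* No std::move() in the return statement, to allow copy elision. */
        return buffer;
    }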
These rules match the `object ownership rules from the Chromium C++ Style Guide`_.
.. _object ownership rules from the Chromium C++ Style Guide: https://chromium.googlesource.com/chromium/src/+/master/styleguide/c++/c++.md#object-ownership-and-calling-conventions
.. attention:: Long term borrowing of single owner objects is allowed. Example
use cases are implementation of the singleton pattern (where the singleton
guarantees the validity of the reference forever), or returning references
to global objects whose lifetime matches the lifetime of the application. As
long term borrowing isn't marked through language constructs, it shall be
documented explicitly in detail in the API.
Global Variables
~~~~~~~~~~~~~~~~
The order of initializations and destructions of global variables cannot be
reasonably controlled. This can cause problems (including segfaults) when global
variables depend on each other, directly or indirectly. For example, if the
declaration of a global variable calls a constructor which uses another global
variable that hasn't been initialized yet, incorrect behavior is likely.
Similar issues may occur when the library is unloaded and global variables are
destroyed.
Global variables that are statically initialized and have trivial destructors
(such as an integer constant) do not cause any issue. Other global variables
shall be avoided when possible, but are allowed when required (for instance to
implement factories with auto-registration). They shall not depend on any other
global variable, should run a minimal amount of code in the constructor and
destructor, and code that contains dependencies should be moved to a later
point in time.
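One illustrative way to defer such code, shown here as a sketch rather than a
mandated pattern, is to construct the object on first use through a
function-local static variable, which is not subject to the global
initialization order:

::

    #include <string>
    #include <vector>

    /* Invented example: a registry accessed from other global constructors. */
    class Registry
    {
    public:
        static Registry &instance()
        {
            /* Constructed on first use, not at library load time. */
            static Registry registry;
            return registry;
        }

        void add(const std::string &name)
        {
            names_.push_back(name);
        }

    private:
        Registry() = default;

        std::vector<std::string> names_;
    };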
Error Handling
~~~~~~~~~~~~~~
Proper error handling is crucial to the stability of libcamera. The project
follows a set of high-level rules:
* Make errors impossible through API design. The best way to handle errors is
to prevent them from happening in the first place. The preferred option is
thus to prevent error conditions at the API design stage when possible.
* Detect errors at compile time. Compile-time checking of errors not only
reduces the runtime complexity, but also ensures that errors are caught early
on during development instead of during testing or, worse, in production. The
static_assert() declaration should be used where possible for this purpose.
* Validate all external API contracts. Explicit pre-condition checks shall be
used to validate API contracts. Whenever possible, appropriate errors should
be returned directly. As libcamera doesn't use exceptions, errors detected in
constructors shall result in the constructed object being marked as invalid,
with a public member function available to check validity (a sketch of this
pattern follows this list). The checks should
be thorough for the public API, and may be lighter for internal APIs when
pre-conditions can reasonably be considered to be met through other means.
* Use assertions for fatal issues only. The ASSERT() macro causes a program
abort when compiled in debug mode, and is a no-op otherwise. It is useful to
abort execution synchronously with the error check instead of letting the
error cause problems (such as segmentation faults) later, and to provide a
detailed backtrace. Assertions shall only be used to catch conditions that are
never supposed to happen without a serious bug in libcamera that would prevent
safe recovery. They shall never be used to validate API contracts. The
assertion conditions shall not cause any side effect as they are compiled out
in non-debug mode.
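As referenced above, the constructor validity pattern can be sketched as
follows (the class and its members are invented for the example):

::

    #include <fcntl.h>
    #include <unistd.h>

    class Device
    {
    public:
        Device(const char *path)
            : fd_(open(path, O_RDWR))
        {
        }

        ~Device()
        {
            if (isValid())
                close(fd_);
        }

        /* Errors detected in the constructor mark the object as invalid. */
        bool isValid() const { return fd_ >= 0; }

    private:
        int fd_;
    };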
C Compatibility Headers
~~~~~~~~~~~~~~~~~~~~~~~
The C++ standard defines a set of C++ standard library headers, and for some of
them, defines C compatibility headers. The former have a name of the form
<cxxx> while the latter are named <xxx.h>. The C++ headers declare names in the
std namespace, and may declare the same names in the global namespace. The C
compatibility headers declare names in the global namespace, and may declare
the same names in the std namespace. Code shall not rely on the optional
declaration of names in the global or std namespace.
Usage of the C compatibility headers is preferred, except for the math.h header.
Where math.h defines separate functions for different argument types (e.g.
abs(int), labs(long int), fabs(double) and fabsf(float)) and requires the
developer to pick the right function, cmath defines overloaded functions
(std::abs(int), std::abs(long int), std::abs(double) and std::abs(float) to let
the compiler select the right function. This avoids potential errors such as
calling abs(int) with a float argument, performing an unwanted implicit integer
conversion. For this reason, cmath is preferred over math.h.
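A brief illustration of the overload selection (the function is invented for
the example):

::

    #include <cmath>
    #include <cstdlib>

    double magnitudes(float value, long count)
    {
        /*
         * std::abs() is overloaded for both argument types. With math.h the
         * developer would have to call fabsf() and labs() respectively.
         */
        return std::abs(value) + std::abs(count);
    }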
Documentation
-------------
All public and protected classes, structures, enumerations, macros, functions
and variables shall be documented with a Doxygen comment block, using the
Javadoc style with C-style comments. When documenting private member functions
and variables the same Doxygen style shall be used as for public and protected
members.
Documentation relates to header files, but shall be stored in the .cpp source
files in order to group the implementation and documentation. Every documented
header file shall have a \file documentation block in the .cpp source file.
The following comment block shows an example of correct documentation for a
member function of the PipelineHandler class.
::
/**
* \fn PipelineHandler::start()
* \brief Start capturing from a group of streams
* \param[in] camera The camera to start
*
* Start the group of streams that have been configured for capture by
* \a configureStreams(). The intended caller of this function is the Camera
* class which will in turn be called from the application to indicate that
* it has configured the streams and is ready to capture.
*
* \return 0 on success or a negative error code otherwise
*/
The comment block shall be placed right before the function it documents. If
the function is defined inline in the class definition in the header file, the
comment block shall be placed alone in the .cpp source file in the same order
as the function definitions in the header file and shall start with an \fn
line. Otherwise no \fn line shall be present.
The \brief directive shall be present. If the function takes parameters, \param
directives shall be present, with the appropriate [in], [out] or [inout]
specifiers. Only when the direction of the parameters isn't known (for instance
when defining a template function with variadic arguments) shall the direction
specifier be omitted. The \return directive shall be present when the
function returns a value, and shall be omitted otherwise.
The long description is optional. When present it shall be surrounded by empty
lines and may span multiple paragraphs. No blank lines shall otherwise be added
between the \fn, \brief, \param and \return directives.
Tools
-----
The 'clang-format' code formatting tool can be used to reformat source files
with the libcamera coding style, defined in the .clang-format file at the root
of the source tree.
As clang-format is a code formatter, it operates on full files and outputs
reformatted source code. While it can be used to reformat code before sending
patches, it may generate unrelated changes. To avoid this, libcamera provides a
'checkstyle.py' script wrapping the formatting tools to only retain related
changes. This should be used to validate modifications before submitting them
for review.
The script operates on one or multiple git commits specified on the command
line. It does not modify the git tree, the index or the working directory and
is thus safe to run at any point.
Commits are specified using the same revision range syntax as 'git log'. The
most usual use cases are to specify a single commit by sha1, branch name or tag
name, or a commit range with the <from>..<to> syntax. When no arguments are
given, the topmost commit of the current branch is selected.
::
$ ./utils/checkstyle.py cc7d204b2c51
----------------------------------------------------------------------------------
cc7d204b2c51853f7d963d144f5944e209e7ea29 libcamera: Use the logger instead of cout
----------------------------------------------------------------------------------
No style issue detected
When operating on a range of commits, style checks are performed on each commit
from oldest to newest.
::
$ ../utils/checkstyle.py 3b56ddaa96fb~3..3b56ddaa96fb
----------------------------------------------------------------------------------
b4351e1a6b83a9cfbfc331af3753602a02dbe062 libcamera: log: Fix Doxygen documentation
----------------------------------------------------------------------------------
No style issue detected
--------------------------------------------------------------------------------------
6ab3ff4501fcfa24db40fcccbce35bdded7cd4bc libcamera: log: Document the LogMessage class
--------------------------------------------------------------------------------------
No style issue detected
---------------------------------------------------------------------------------
3b56ddaa96fbccf4eada05d378ddaa1cb6209b57 build: Add 'std=c++11' cpp compiler flag
---------------------------------------------------------------------------------
Commit doesn't touch source files, skipping
Commits that do not touch any .c, .cpp or .h files are skipped.
::
$ ./utils/checkstyle.py edbd2059d8a4
----------------------------------------------------------------------
edbd2059d8a4bd759302ada4368fa4055638fd7f libcamera: Add initial logger
----------------------------------------------------------------------
--- src/libcamera/include/log.h
+++ src/libcamera/include/log.h
@@ -21,11 +21,14 @@
{
public:
LogMessage(const char *fileName, unsigned int line,
- LogSeverity severity);
- LogMessage(const LogMessage&) = delete;
+ LogSeverity severity);
+ LogMessage(const LogMessage &) = delete;
~LogMessage();
- std::ostream& stream() { return msgStream; }
+ std::ostream &stream()
+ {
+ return msgStream;
+ }
private:
std::ostringstream msgStream;
--- src/libcamera/log.cpp
+++ src/libcamera/log.cpp
@@ -42,7 +42,7 @@
static const char *log_severity_name(LogSeverity severity)
{
- static const char * const names[] = {
+ static const char *const names[] = {
"INFO",
"WARN",
" ERR",
---
2 potential style issues detected, please review
When potential style issues are detected, they are displayed in the form of a
diff that fixes the issues, on top of the corresponding commit. As the script is
in early development, false positives are expected. The flagged issues should be
reviewed, but the diff doesn't need to be applied blindly.
Execution of checkstyle.py can be automated through git commit hooks. Examples
of pre-commit and post-commit hooks are available in `utils/hooks/pre-commit`
and `utils/hooks/post-commit`. You can install either hook by copying it to
`.git/hooks/`. The post-commit hook is easier to start with as it will only flag
potential issues after committing, while the pre-commit hook will abort the
commit if issues are detected and requires usage of `git commit --no-verify` to
ignore false positives.
Happy hacking, libcamera awaits your patches!


@@ -0,0 +1,172 @@
# SPDX-License-Identifier: CC-BY-SA-4.0
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'libcamera'
copyright = '2018-2019, The libcamera documentation authors'
author = u'Kieran Bingham, Jacopo Mondi, Laurent Pinchart, Niklas Söderlund'
# Version information is provided by the build environment, through the
# sphinx command line.
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
]
# Add any paths that contain templates here, relative to this directory.
templates_path = []
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'en'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = None
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'theme'
html_theme_path = ['.']
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'libcameradoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'libcamera.tex', 'libcamera Documentation',
author, 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'libcamera', 'libcamera Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'libcamera', 'libcamera Documentation',
author, 'libcamera', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output -------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']


@@ -0,0 +1,142 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
Contributing
============
libcamera is developed as a free software project and welcomes contributors.
Whether you would like to help with coding, documentation, testing, proposing
new features, or just discussing the project with the community, you can join
our official public communication channels, or simply check out the code.
The project adheres to a :ref:`code of conduct <code-of-conduct>` that
maintainers, contributors and community members are expected to follow in all
online and offline communication.
Mailing List
------------
We use a public mailing list as our main means of communication. You can find
subscription information and the messages archive on the `libcamera-devel`_
list information page.
.. _libcamera-devel: https://lists.libcamera.org/listinfo/libcamera-devel
IRC Channel
-----------
For informal and real time discussions, our IRC channel on irc.oftc.net is open
to the public. Point your IRC client to #libcamera to say hello, or use the
`WebChat`_.
.. _WebChat: https://webchat.oftc.net/?channels=libcamera
Source Code
-----------
libcamera is in the early stages of development, and no releases are available yet.
The source code is available from the project's `git tree`_.
.. code-block:: shell
$ git clone https://git.libcamera.org/libcamera/libcamera.git
.. _git tree: https://git.libcamera.org/libcamera/libcamera.git/
A mirror is also hosted on `LinuxTV`_.
.. _LinuxTV: https://git.linuxtv.org/libcamera.git/
Issue Tracker
-------------
Our `issue tracker`_ tracks all bugs, issues and feature requests. All issues
are publicly visible, and you can register for an account to create new issues.
.. _issue tracker: https://bugs.libcamera.org/
Documentation
-------------
Project documentation is created using `Sphinx`_. Source level documentation
uses `Doxygen`_. Please make sure to document all code during development.
.. _Sphinx: https://www.sphinx-doc.org
.. _Doxygen: https://www.doxygen.nl
Submitting Patches
------------------
The libcamera project has high standards of stability, efficiency and
reliability. To achieve those, the project goes to great lengths to produce
code that is as easy to read, understand and maintain as possible. This is
made possible by a set of :ref:`coding-style-guidelines` that all submissions
are expected to follow.
We also care about the quality of commit messages. A good commit message not
only describes what a commit does, but why it does so. By conveying clear
information about the purpose of the commit, it helps speed up reviews.
Regardless of whether you're new to git or have years of experience,
https://cbea.ms/git-commit/ is always a good guide to read to improve your
commit message writing skills.
The patch submission process for libcamera is similar to the Linux kernel, and
goes through the `libcamera-devel`_ mailing list. If you have no previous
experience with ``git-send-email``, or just experience trouble configuring it
for your e-mail provider, the sourcehut developers have put together a detailed
guide available at https://git-send-email.io/.
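For instance, sending the last three commits for review could look as follows
(the commit range is illustrative):

.. code-block:: shell

   $ git send-email --to=libcamera-devel@lists.libcamera.org HEAD~3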
Patches submitted to the libcamera project must be certified as suitable for
integration into an open source project. As such, libcamera follows the same
model as utilised by the Linux kernel, and requires the use of 'Signed-off-by:'
tags in all patches.
By signing your contributions you are certifying your work in accordance with
the following:
`Developer's Certificate of Origin`_
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Version 1.1
Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
1 Letterman Drive
Suite D4700
San Francisco, CA, 94129
Everyone is permitted to copy and distribute verbatim copies of this
license document, but changing it is not allowed.
Developer's Certificate of Origin 1.1
By making a contribution to this project, I certify that:
(a) The contribution was created in whole or in part by me and I
have the right to submit it under the open source license
indicated in the file; or
(b) The contribution is based upon previous work that, to the best
of my knowledge, is covered under an appropriate open source
license and I have the right under that license to submit that
work with modifications, whether created in whole or in part
by me, under the same open source license (unless I am
permitted to submit under a different license), as indicated
in the file; or
(c) The contribution was provided directly to me by some other
person who certified (a), (b) or (c) and I have not modified
it.
(d) I understand and agree that this project and the contribution
are public and that a record of the contribution (including all
personal information I submit with it, including my sign-off) is
maintained indefinitely and may be redistributed consistent with
this project or the open source license(s) involved.
.. _Developer's Certificate of Origin: https://developercertificate.org/
.. toctree::
:hidden:
Code of Conduct <code-of-conduct>
Coding Style <coding-style>


@@ -0,0 +1,400 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
.. contents::
:local:
*************
Documentation
*************
.. toctree::
:hidden:
API <api-html/index>
API
===
The libcamera API is extensively documented using Doxygen. The :ref:`API
nightly build <api>` contains the most up-to-date API documentation, built from
the latest master branch.
Feature Requirements
====================
Device enumeration
------------------
The library shall support enumerating all camera devices available in the
system, including both fixed cameras and hotpluggable cameras. It shall
support cameras plugged and unplugged after the initialization of the
library, and shall offer a mechanism to notify applications of camera plug
and unplug.
The following types of cameras shall be supported:
* Internal cameras designed for point-and-shoot still image and video
capture usage, either controlled directly by the CPU, or exposed through
an internal USB bus as a UVC device.
* External UVC cameras designed for video conferencing usage.
Other types of camera, including analog cameras, depth cameras, thermal
cameras, external digital picture or movie cameras, are out of scope for
this project.
A hardware device that includes independent camera sensors, such as front
and back sensors in a phone, shall be considered as multiple camera devices
for the purpose of this library.
Independent Camera Devices
--------------------------
When multiple cameras are present in the system and are able to operate
independently from each other, the library shall expose them as multiple
camera devices and support parallel operation without any additional usage
restriction apart from the limitations inherent to the hardware (such as
memory bandwidth, CPU usage or number of CSI-2 receivers for instance).
Independent processes shall be able to use independent camera devices
without interfering with each other. A single camera device shall be
usable by a single process at a time.
Multiple streams support
------------------------
The library shall support multiple video streams running in parallel
for each camera device, within the limits imposed by the system.
Per frame controls
------------------
The library shall support controlling capture parameters for each stream
on a per-frame basis, in a best-effort manner based on the capabilities of the
hardware and underlying software stack (including kernel drivers and
firmware). It shall apply capture parameters to the frame they target, and
report the value of the parameters that have effectively been used for each
captured frame.
When a camera device supports multiple streams, the library shall allow both
control of each stream independently, and control of multiple streams
together. Streams that are controlled together shall be synchronized. No
synchronization is required for streams controlled independently.
Capability Enumeration
----------------------
The library shall expose capabilities of each camera device in a way that
allows applications to discover those capabilities dynamically. Applications
shall be allowed to cache capabilities for as long as they are using the
library. If capabilities can change at runtime, the library shall offer a
mechanism to notify applications of such changes. Applications shall not
cache capabilities in long term storage between runs.
Capabilities shall be discovered dynamically at runtime from the device when
possible, and may come, in part or in full, from platform configuration
data.
Device Profiles
---------------
The library may define different camera device profiles, each with a minimum
set of required capabilities. Applications may use those profiles to quickly
determine the level of features exposed by a device without parsing the full
list of capabilities. Camera devices may implement additional capabilities
on top of the minimum required set for the profile they expose.
3A and Image Enhancement Algorithms
-----------------------------------
The camera devices shall implement auto exposure, auto gain and auto white
balance. Camera devices that include a focus lens shall implement auto
focus. Additional image enhancement algorithms, such as noise reduction or
video stabilization, may be implemented.
All algorithms may be implemented in hardware or firmware outside of the
library, or in software in the library. They shall all be controllable by
applications.
The library shall be architected to isolate the 3A and image enhancement
algorithms in a component with a documented API, respectively called the 3A
component and the 3A API. The 3A API shall be stable, and shall allow both
open-source and closed-source implementations of the 3A component.
The library may include statically-linked open-source 3A components, and
shall support dynamically-linked open-source and closed-source 3A
components.
Closed-source 3A Component Sandboxing
-------------------------------------
For security purposes, it may be desired to run closed-source 3A components
in a separate process. The 3A API would in such a case be transported over
IPC. The 3A API shall make it possible to use any IPC mechanism that
supports passing file descriptors.
The library may implement an IPC mechanism, and shall support third-party
platform-specific IPC mechanisms through the implementation of a
platform-specific 3A API wrapper. No modification to the library shall be
needed to use such third-party IPC mechanisms.
The 3A component shall not directly access any device node on the system.
Such accesses shall instead be performed through the 3A API. The library
shall validate all accesses and restrict them to what is absolutely required
by 3A components.
V4L2 Compatibility Layer
------------------------
The project shall support traditional V4L2 applications through an additional
libcamera wrapper library. The wrapper library shall trap all accesses to
camera devices through `LD_PRELOAD`, and route them through libcamera to
emulate a high-level V4L2 camera device. It shall expose camera device
features on a best-effort basis, and aim for the level of features
traditionally available from a UVC camera designed for video conferencing.
Android Camera HAL v3 Compatibility
-----------------------------------
The library API shall expose all the features required to implement an
Android Camera HAL v3 on top of libcamera. Some features of the HAL may be
omitted as long as they can be implemented separately in the HAL, such as
JPEG encoding or YUV reprocessing.
Camera Stack
============
::
a c / +-------------+ +-------------+ +-------------+ +-------------+
p a | | Native | | Framework | | Native | | Android |
p t | | V4L2 | | Application | | libcamera | | Camera |
l i | | Application | | (gstreamer) | | Application | | Framework |
i o \ +-------------+ +-------------+ +-------------+ +-------------+
n ^ ^ ^ ^
| | | |
l a | | | |
i d v v | v
b a / +-------------+ +-------------+ | +-------------+
c p | | V4L2 | | Camera | | | Android |
a t | | Compat. | | Framework | | | Camera |
m a | | | | (gstreamer) | | | HAL |
e t \ +-------------+ +-------------+ | +-------------+
r i ^ ^ | ^
a o | | | |
n | | | |
/ | ,................................................
| | ! : Language : !
l f | | ! : Bindings : !
i r | | ! : (optional) : !
b a | | \...............................................'
c m | | | | |
a e | | | | |
m w | v v v v
e o | +----------------------------------------------------------------+
r r | | |
a k | | libcamera |
| | |
\ +----------------------------------------------------------------+
^ ^ ^
Userspace | | |
------------------------ | ---------------- | ---------------- | ---------------
Kernel | | |
v v v
+-----------+ +-----------+ +-----------+
| Media | <--> | Video | <--> | V4L2 |
| Device | | Device | | Subdev |
+-----------+ +-----------+ +-----------+
The camera stack comprises four software layers. From bottom to top:
* The kernel drivers control the camera hardware and expose a
low-level interface to userspace through the Linux kernel V4L2
family of APIs (Media Controller API, V4L2 Video Device API and
V4L2 Subdev API).
* The libcamera framework is the core part of the stack. It
handles all control of the camera devices in its core component,
libcamera, and exposes a native C++ API to upper layers. Optional
language bindings allow interfacing to libcamera from other
programming languages.
Those components live in the same source code repository and
all together constitute the libcamera framework.
* The libcamera adaptation is an umbrella term designating the
components that interface to libcamera in other frameworks.
Notable examples are a V4L2 compatibility layer, a gstreamer
libcamera element, and an Android camera HAL implementation based
on libcamera.
Those components can live in the libcamera project source code
in separate repositories, or move to their respective project's
repository (for instance the gstreamer libcamera element).
* The applications and upper level frameworks are based on the
libcamera framework or libcamera adaptation, and are outside of
the scope of the libcamera project.
libcamera Architecture
======================
::
---------------------------< libcamera Public API >---------------------------
^ ^
| |
v v
+-------------+ +-------------------------------------------------+
| Camera | | Camera Device |
| Devices | | +---------------------------------------------+ |
| Manager | | | Device-Agnostic | |
+-------------+ | | | |
^ | | +------------------------+ |
| | | | ~~~~~~~~~~~~~~~~~~~~~ |
| | | | { +---------------+ } |
| | | | } | ////Image//// | { |
| | | | <-> | /Processing// | } |
| | | | } | /Algorithms// | { |
| | | | { +---------------+ } |
| | | | ~~~~~~~~~~~~~~~~~~~~~ |
| | | | ======================== |
| | | | +---------------+ |
| | | | | //Pipeline/// | |
| | | | <-> | ///Handler/// | |
| | | | | ///////////// | |
| | +--------------------+ +---------------+ |
| | Device-Specific |
| +-------------------------------------------------+
| ^ ^
| | |
v v v
+--------------------------------------------------------------------+
| Helpers and Support Classes |
| +-------------+ +-------------+ +-------------+ +-------------+ |
| | MC & V4L2 | | Buffers | | Sandboxing | | Plugins | |
| | Support | | Allocator | | IPC | | Manager | |
| +-------------+ +-------------+ +-------------+ +-------------+ |
| +-------------+ +-------------+ |
| | Pipeline | | ... | |
| | Runner | | | |
| +-------------+ +-------------+ |
+--------------------------------------------------------------------+
/// Device-Specific Components
~~~ Sandboxing
While offering a unified API towards upper layers, and presenting
itself as a single library, libcamera isn't monolithic. It exposes
multiple components through its public API, is built around a set of
separate helpers internally, uses device-specific components and can
load dynamic plugins.
Camera Devices Manager
The Camera Devices Manager provides a view of available cameras
in the system. It performs cold enumeration and runtime camera
management, and supports a hotplug notification mechanism in its
public API.
To avoid the cost associated with cold enumeration of all devices
at application start, and to arbitrate concurrent access to camera
devices, the Camera Devices Manager could later be split into a
separate service, possibly with integration in platform-specific
device management.
Camera Device
The Camera Device represents a camera device to upper layers. It
exposes full control of the device through the public API, and is
thus the highest level object exposed by libcamera.
Camera Device instances are created by the Camera Devices
Manager. An optional function to create new instances could be exposed
through the public API to speed up initialization when the upper
layer knows how to directly address camera devices present in the
system.
Pipeline Handler
The Pipeline Handler manages complex pipelines exposed by the kernel drivers
through the Media Controller and V4L2 APIs. It abstracts pipeline handling to
hide device-specific details from the rest of the library, and implements both
pipeline configuration based on stream configuration, and pipeline runtime
execution and scheduling when needed by the device.
This component is device-specific and is part of the libcamera code base. As
such it is covered by the same free software license as the rest of libcamera
and needs to be contributed upstream by device vendors. The Pipeline Handler
lives in the same process as the rest of the library, and has access to all
helpers and kernel camera-related devices.
Image Processing Algorithms
Together with the hardware image processing and hardware statistics
collection, the Image Processing Algorithms implement 3A (Auto-Exposure,
Auto-White Balance and Auto-Focus) and other algorithms. They run on the CPU
and interact with the kernel camera devices to control hardware image
processing based on the parameters supplied by upper layers, closing the
control loop of the ISP.
This component is device-specific and is loaded as an external plugin. It can
be part of the libcamera code base, in which case it is covered by the same
license, or provided externally as an open-source or closed-source component.
The component is sandboxed and can only interact with libcamera through
internal APIs specifically marked as such. In particular it will have no
direct access to kernel camera devices, and all its accesses to image and
metadata will be mediated by dmabuf instances explicitly passed to the
component. The component must be prepared to run in a process separate from
the main libcamera process, and to have a very restricted view of the system,
including no access to networking APIs and limited access to file systems.
The sandboxing mechanism isn't defined by libcamera. One example
implementation will be provided as part of the project, and platform vendors
will be able to provide their own sandboxing mechanism as a plugin.
libcamera should provide a basic implementation of Image Processing
Algorithms, to serve as a reference for the internal API. Device vendors are
expected to provide a full-fledged implementation compatible with their
Pipeline Handler. One goal of the libcamera project is to create an
environment in which the community will be able to compete with the
closed-source vendor binaries and develop a high quality open source
implementation.
Helpers and Support Classes
While Pipeline Handlers are device-specific, implementations are expected to
share code due to usage of identical APIs towards the kernel camera drivers
and the Image Processing Algorithms. This includes without limitation handling
of the MC and V4L2 APIs, buffer management through dmabuf, and pipeline
discovery, configuration and scheduling. Such code will be factored out to
helpers when applicable.
Other parts of libcamera will also benefit from factoring code out to
self-contained support classes, even if such code is present only once in the
code base, in order to keep the source code clean and easy to read. This
should be the case for instance for plugin management.
V4L2 Compatibility Layer
------------------------
V4L2 compatibility is achieved through a shared library that traps all
accesses to camera devices and routes them to libcamera to emulate high-level
V4L2 camera devices. It is injected in a process address space through
``LD_PRELOAD`` and is completely transparent for applications.
The compatibility layer exposes camera device features on a best-effort basis,
and aims for the level of features traditionally available from a UVC camera
designed for video conferencing.
Android Camera HAL
------------------
Camera support for Android is achieved through a generic Android
camera HAL implementation on top of libcamera. The HAL will internally
implement features required by Android and missing from libcamera, such as JPEG encoding
support.
The Android camera HAL implementation will initially target the
LIMITED hardware level, with support for the FULL level then being gradually
implemented.
@@ -0,0 +1,164 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
Environment variables
=====================
The libcamera behaviour can be tuned through environment variables. This
document lists all the available variables and describes their usage.
List of variables
-----------------
LIBCAMERA_LOG_FILE
The custom destination for log output.
Example value: ``/home/{user}/camera_log.log``
LIBCAMERA_LOG_LEVELS
Configure the verbosity of log messages for different categories (`more <Log levels_>`__).
Example value: ``*:DEBUG``
LIBCAMERA_LOG_NO_COLOR
Disable coloring of log messages (`more <Notes about debugging_>`__).
LIBCAMERA_IPA_CONFIG_PATH
Define custom search locations for IPA configurations (`more <IPA configuration_>`__).
Example value: ``${HOME}/.libcamera/share/ipa:/opt/libcamera/vendor/share/ipa``
LIBCAMERA_IPA_FORCE_ISOLATION
When set to a non-empty string, force process isolation of all IPA modules.
Example value: ``1``
LIBCAMERA_IPA_MODULE_PATH
Define custom search locations for IPA modules (`more <IPA module_>`__).
Example value: ``${HOME}/.libcamera/lib:/opt/libcamera/vendor/lib``
LIBCAMERA_PIPELINES_MATCH_LIST
Define an ordered list of pipeline names to be used to match the media
devices in the system. The pipeline handler names used to populate the
variable are the ones passed to the REGISTER_PIPELINE_HANDLER() macro in the
source code.
Example value: ``rkisp1,simple``
LIBCAMERA_RPI_CONFIG_FILE
Define a custom configuration file to use in the Raspberry Pi pipeline handler.
Example value: ``/usr/local/share/libcamera/pipeline/rpi/vc4/minimal_mem.yaml``
Further details
---------------
Notes about debugging
~~~~~~~~~~~~~~~~~~~~~
The environment variables ``LIBCAMERA_LOG_FILE``, ``LIBCAMERA_LOG_LEVELS`` and
``LIBCAMERA_LOG_NO_COLOR`` are used to modify the default configuration of the
libcamera logger.
By default, libcamera logs all messages to the standard error (std::cerr).
Messages are colored by default depending on the log level. Coloring can be
disabled by setting the ``LIBCAMERA_LOG_NO_COLOR`` environment variable.
The log output can also be redirected to a file by setting the
``LIBCAMERA_LOG_FILE`` environment variable to the log file name. This also
disables coloring.
Log levels are controlled through the ``LIBCAMERA_LOG_LEVELS`` variable, which
accepts a comma-separated list of 'category:level' pairs.
The `level <Log levels_>`__ part is mandatory and can either be specified by
name or by numerical index associated with each level.
The optional `category <Log categories_>`__ is a string matching the categories
defined by each file in the source base using the logging infrastructure. It
can include a wildcard ('*') character at the end to match multiple categories.
For more information refer to the `API documentation <https://libcamera.org/api-html/log_8h.html#details>`__.
Examples:
Enable full debug output to a separate file, for every `category <Log categories_>`__
within a local environment:
.. code:: bash
:~$ LIBCAMERA_LOG_FILE='/tmp/example_log.log' \
LIBCAMERA_LOG_LEVELS=0 \
cam --list
Enable full debug output for the categories ``Camera`` and ``V4L2`` within a
global environment:
.. code:: bash
:~$ export LIBCAMERA_LOG_LEVELS='Camera:DEBUG,V4L2:DEBUG'
:~$ cam --list
Log levels
~~~~~~~~~~
This is the list of available log levels. A message is printed if its level
is greater than or equal to the chosen level, and discarded otherwise.
- DEBUG (0)
- INFO (1)
- WARN (2)
- ERROR (3)
- FATAL (4)
Example:
If you choose WARN (2), you will be able to see WARN (2), ERROR (3) and FATAL (4)
but not DEBUG (0) and INFO (1).
Log categories
~~~~~~~~~~~~~~
Every category represents a specific area of the libcamera codebase; the
names can be located within the source code, for example:
`src/libcamera/camera_manager.cpp <https://git.libcamera.org/libcamera/libcamera.git/tree/src/libcamera/camera_manager.cpp#n35>`__
.. code:: cpp
LOG_DEFINE_CATEGORY(Camera)
There are two available macros used to assign a category name to a part of the
libcamera codebase:
LOG_DEFINE_CATEGORY
This macro is required in order to use the ``LOGC`` macro for a particular
category. It can only be used once for each category. If you want to create
log messages within multiple compilation units for the same category, use the
``LOG_DECLARE_CATEGORY`` macro in every file except the definition file.
LOG_DECLARE_CATEGORY
Used for sharing an already defined category between multiple separate
compilation units.
Both macros have to be used within the libcamera namespace of the C++ source
code.
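As a brief sketch of how the two macros combine across compilation units. The
file and function names are purely illustrative, and the header path is an
assumption based on current libcamera trees:

.. code:: cpp

    /* camera_manager.cpp - defines the category, once per category */
    #include <libcamera/base/log.h>

    namespace libcamera {

    LOG_DEFINE_CATEGORY(Camera)

    void logFromDefiningFile()
    {
        LOG(Camera, Debug) << "message from the defining file";
    }

    } /* namespace libcamera */

    /* other_file.cpp - reuses the category defined above */
    #include <libcamera/base/log.h>

    namespace libcamera {

    LOG_DECLARE_CATEGORY(Camera)

    void logFromAnotherFile()
    {
        LOG(Camera, Info) << "message from another compilation unit";
    }

    } /* namespace libcamera */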
IPA configuration
~~~~~~~~~~~~~~~~~
IPA modules use configuration files to store parameters. The format and
contents of the configuration files are specific to the IPA module. They usually
contain tuning parameters for the algorithms, in JSON format.
The ``LIBCAMERA_IPA_CONFIG_PATH`` variable can be used to specify custom
storage locations to search for those configuration files.
`Examples <https://git.libcamera.org/libcamera/libcamera.git/tree/src/ipa/rpi/vc4/data>`__
IPA module
~~~~~~~~~~
In order to locate the correct IPA module for your hardware, libcamera gathers
existing IPA modules from multiple locations. The default locations for this
operation are the installed system path (for example on Debian:
``/usr/local/x86_64-pc-linux-gnu/libcamera``) and the build directory.
With the ``LIBCAMERA_IPA_MODULE_PATH``, you can specify a non-default location
to search for IPA modules.
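For example, a hypothetical invocation that combines a custom module path with
the ``cam`` test application could look like:

.. code:: bash

    :~$ LIBCAMERA_IPA_MODULE_PATH="${HOME}/.libcamera/lib" cam --list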
@@ -0,0 +1,5 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
.. Getting started information is defined in the project README file.
.. include:: ../README.rst
:start-after: .. section-begin-getting-started
:end-before: .. section-end-getting-started
@@ -0,0 +1,639 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
Using libcamera in a C++ application
====================================
This tutorial shows how to create a C++ application that uses libcamera to
interface with a camera on a system, capture frames from it for 3 seconds, and
write metadata about the frames to standard output.
Application skeleton
--------------------
Most of the code in this tutorial runs in the ``int main()`` function
with a separate global function to handle events. The two functions need
to share data, which are stored in global variables for simplicity. A
production-ready application would organize the various objects created
in classes, and the event handler would be a class member function to
provide context data without requiring global variables.
Use the following code snippets as the initial application skeleton.
It already lists all the necessary include directives and instructs the
compiler to use the libcamera namespace, which gives access to the libcamera
defined names and types without the need to prefix them.
.. code:: cpp
#include <iomanip>
#include <iostream>
#include <memory>
#include <thread>
#include <libcamera/libcamera.h>
using namespace libcamera;
using namespace std::chrono_literals;
int main()
{
// Code to follow
return 0;
}
Camera Manager
--------------
Every libcamera-based application needs an instance of a `CameraManager`_ that
runs for the life of the application. When the Camera Manager starts, it
enumerates all the cameras detected in the system. Behind the scenes, libcamera
abstracts and manages the complex pipelines that kernel drivers expose through
the `Linux Media Controller`_ and `Video for Linux`_ (V4L2) APIs, meaning that
an application doesn't need to handle device or driver specific details.
.. _CameraManager: https://libcamera.org/api-html/classlibcamera_1_1CameraManager.html
.. _Linux Media Controller: https://www.kernel.org/doc/html/latest/media/uapi/mediactl/media-controller-intro.html
.. _Video for Linux: https://www.linuxtv.org/docs.php
Before the ``int main()`` function, create a global shared pointer
variable for the camera to support the event callback later:
.. code:: cpp
static std::shared_ptr<Camera> camera;
Create a Camera Manager instance at the beginning of the main function, and then
start it. An application must only create a single Camera Manager instance.
The CameraManager can be stored in a unique_ptr to automate deleting the
instance when it is no longer used, but care must be taken to ensure all
cameras are released explicitly before this happens.
.. code:: cpp
std::unique_ptr<CameraManager> cm = std::make_unique<CameraManager>();
cm->start();
During the application initialization, the Camera Manager is started to
enumerate all the supported devices and create cameras that the application can
interact with.
Once the camera manager is started, we can use it to iterate the available
cameras in the system:
.. code:: cpp
for (auto const &camera : cm->cameras())
std::cout << camera->id() << std::endl;
Printing the camera id lists the machine-readable unique identifiers, so for
example, the output on a Linux machine with a connected USB webcam is
``\_SB_.PCI0.XHC_.RHUB.HS08-8:1.0-5986:2115``.
What libcamera considers a camera
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The libcamera library considers any unique source of video frames, which
usually corresponds to a camera sensor, as a single camera device. Camera
devices expose streams, which are obtained by processing data from the single
image source. All streams share some basic properties, such as the frame
duration and the image exposure time, as these only depend on the image source
configuration.
Applications select one or multiple Camera devices they wish to operate on, and
require frames from at least one of their Streams.
Create and acquire a camera
---------------------------
This example application uses a single camera (the first enumerated one) that
the Camera Manager reports as available to applications.
Camera devices are stored by the CameraManager in a list accessible by index,
or can be retrieved by ID through the ``CameraManager::get()`` function. The
code below retrieves the ID of the first available camera and gets the camera
by ID from the Camera Manager, after making sure that at least one camera is
available.
.. code:: cpp
auto cameras = cm->cameras();
if (cameras.empty()) {
std::cout << "No cameras were identified on the system."
<< std::endl;
cm->stop();
return EXIT_FAILURE;
}
std::string cameraId = cameras[0]->id();
auto camera = cm->get(cameraId);
/*
* Note that `camera` may not compare equal to `cameras[0]`.
* In fact, it might simply be a `nullptr`, as the particular
* device might have disappeared (and reappeared) in the meantime.
*/
Once a camera has been selected, an application needs to acquire an exclusive
lock on it so that no other application can use it.
.. code:: cpp
camera->acquire();
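``Camera::acquire()`` returns an error code on failure, for instance when
another process already holds the camera. A slightly more defensive sketch
(the error handling shown is illustrative, not part of the original example)
would check it:

.. code:: cpp

    if (camera->acquire() < 0) {
        std::cerr << "Failed to acquire camera" << std::endl;
        cm->stop();
        return EXIT_FAILURE;
    }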
Configure the camera
--------------------
Before the application can do anything with the camera, it needs to configure
the image format and sizes of the streams it wants to capture frames from.
Stream configurations are represented by instances of the
``StreamConfiguration`` class, which are grouped together in a
``CameraConfiguration`` object. Before an application can start setting its
desired configuration, a ``CameraConfiguration`` instance needs to be generated
from the ``Camera`` device using the ``Camera::generateConfiguration()``
function.
The libcamera library uses the ``StreamRole`` enumeration to define predefined
ways an application intends to use a camera. The
``Camera::generateConfiguration()`` function accepts a list of desired roles and
generates a ``CameraConfiguration`` with the best stream parameters
configuration for each of the requested roles. If the camera can handle the
requested roles, it returns an initialized ``CameraConfiguration``; if it
can't, it returns a null pointer.
It is possible for applications to generate an empty ``CameraConfiguration``
instance by not providing any role. The desired configuration will have to be
filled in and validated manually.
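A rough sketch of that manual path follows; the pixel format and size are
arbitrary examples, and error handling is kept minimal:

.. code:: cpp

    /* Generate an empty configuration by passing no roles. */
    std::unique_ptr<CameraConfiguration> config =
        camera->generateConfiguration({});

    /* Fill in a stream configuration by hand. formats::YUYV is declared
     * in <libcamera/formats.h>. */
    StreamConfiguration cfg;
    cfg.pixelFormat = formats::YUYV;
    cfg.size = { 640, 480 };
    config->addConfiguration(cfg);

    /* Validate the hand-built configuration before use. */
    if (config->validate() == CameraConfiguration::Invalid)
        return EXIT_FAILURE;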
In the example application, create a new configuration variable and use the
``Camera::generateConfiguration`` function to produce a ``CameraConfiguration``
for the single ``StreamRole::Viewfinder`` role.
.. code:: cpp
std::unique_ptr<CameraConfiguration> config = camera->generateConfiguration( { StreamRole::Viewfinder } );
The generated ``CameraConfiguration`` has a ``StreamConfiguration`` instance for
each ``StreamRole`` the application requested. Each of these has a default size
and format that the camera assigned, and a list of supported pixel formats and
sizes.
The code below accesses the first and only ``StreamConfiguration`` item in the
``CameraConfiguration`` and outputs its parameters to standard output.
.. code:: cpp
StreamConfiguration &streamConfig = config->at(0);
std::cout << "Default viewfinder configuration is: " << streamConfig.toString() << std::endl;
This is expected to output something like:
``Default viewfinder configuration is: 1280x720-MJPEG``
Change and validate the configuration
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
With an initialized ``CameraConfiguration``, an application can make changes
to the parameters it contains. For example, to change the width and height,
use the following code:
.. code:: cpp
streamConfig.size.width = 640;
streamConfig.size.height = 480;
If an application changes any parameters, it must validate the configuration
before applying it to the camera using the ``CameraConfiguration::validate()``
function. If the new values are not supported by the ``Camera`` device, the
validation process adjusts the parameters to what it considers to be the closest
supported values.
The ``validate`` function returns a `Status`_ which applications shall check to
see if the Pipeline Handler adjusted the configuration.
.. _Status: https://libcamera.org/api-html/classlibcamera_1_1CameraConfiguration.html#a64163f21db2fe1ce0a6af5a6f6847744
For example, the code above set the width and height to 640x480, but if the
camera cannot produce an image that large, it might adjust the configuration to
the supported size of 320x240 and return ``Adjusted`` as validation status
result.
If the configuration to validate cannot be adjusted to a set of supported
values, the validation procedure fails and returns the ``Invalid`` status.
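As a sketch, an application could branch on all three possible statuses
explicitly (the messages are illustrative):

.. code:: cpp

    switch (config->validate()) {
    case CameraConfiguration::Valid:
        break;
    case CameraConfiguration::Adjusted:
        std::cout << "Configuration adjusted: "
                  << streamConfig.toString() << std::endl;
        break;
    case CameraConfiguration::Invalid:
        std::cerr << "Configuration invalid" << std::endl;
        return EXIT_FAILURE;
    }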
For this example application, the code below prints the adjusted values to
standard out.
.. code:: cpp
config->validate();
std::cout << "Validated viewfinder configuration is: " << streamConfig.toString() << std::endl;
For example, the output might be something like
``Validated viewfinder configuration is: 320x240-MJPEG``
A validated ``CameraConfiguration`` can be given to the ``Camera`` device to be
applied to the system.
.. code:: cpp
camera->configure(config.get());
If an application doesn't validate the configuration before calling
``Camera::configure()``, the call may fail if the given configuration would
have to be adjusted.
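``Camera::configure()`` returns an error code when this happens, so a more
cautious sketch would be:

.. code:: cpp

    if (camera->configure(config.get()) < 0) {
        std::cerr << "Failed to configure camera" << std::endl;
        return EXIT_FAILURE;
    }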
Allocate FrameBuffers
---------------------
An application needs to reserve the memory that libcamera can write incoming
frames and data to, and that the application can then read. The libcamera
library uses ``FrameBuffer`` instances to represent buffers allocated in
memory. An application should reserve enough memory for the frame size the
streams need based on the configured image sizes and formats.
The libcamera library consumes buffers provided by applications as
``FrameBuffer`` instances, which makes libcamera a consumer of buffers exported
by other devices (such as displays or video encoders), or allocated from an
external allocator (such as ION on Android).
In some situations, applications do not have any means to allocate or get hold
of suitable buffers, for instance, when no other device is involved, or on Linux
platforms that lack a centralized allocator. The ``FrameBufferAllocator`` class
provides a buffer allocator an application can use in these situations.
An application doesn't have to use the default ``FrameBufferAllocator`` that
libcamera provides. It can instead allocate memory manually and pass the buffers
in ``Request``\s (read more about ``Request`` in `the frame capture section
<#frame-capture>`_ of this guide). The example in this guide covers using the
``FrameBufferAllocator`` that libcamera provides.
Using the libcamera ``FrameBufferAllocator``
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Applications create a ``FrameBufferAllocator`` for a Camera and use it
to allocate buffers for streams of a ``CameraConfiguration`` with the
``allocate()`` function.
The list of allocated buffers can be retrieved using the ``Stream`` instance
as the parameter of the ``FrameBufferAllocator::buffers()`` function.
.. code:: cpp
FrameBufferAllocator *allocator = new FrameBufferAllocator(camera);
for (StreamConfiguration &cfg : *config) {
int ret = allocator->allocate(cfg.stream());
if (ret < 0) {
std::cerr << "Can't allocate buffers" << std::endl;
return -ENOMEM;
}
size_t allocated = allocator->buffers(cfg.stream()).size();
std::cout << "Allocated " << allocated << " buffers for stream" << std::endl;
}
Frame Capture
~~~~~~~~~~~~~
The libcamera library implements a streaming model based on per-frame requests.
For each frame an application wants to capture it must queue a request for it to
the camera. With libcamera, a ``Request`` associates at least one ``Stream``
with a ``FrameBuffer`` representing the memory location where frames are to be
stored.
First, by using the ``Stream`` instance associated with each
``StreamConfiguration``, retrieve the list of ``FrameBuffer``\s created for it
using the frame allocator. Then create a vector of requests to be submitted to
the camera.
.. code:: cpp
Stream *stream = streamConfig.stream();
const std::vector<std::unique_ptr<FrameBuffer>> &buffers = allocator->buffers(stream);
std::vector<std::unique_ptr<Request>> requests;
Proceed to fill the request vector by creating ``Request`` instances from the
camera device, and associate a buffer for each of them for the ``Stream``.
.. code:: cpp
for (unsigned int i = 0; i < buffers.size(); ++i) {
    std::unique_ptr<Request> request = camera->createRequest();
    if (!request) {
        std::cerr << "Can't create request" << std::endl;
        return -ENOMEM;
    }

    const std::unique_ptr<FrameBuffer> &buffer = buffers[i];
    int ret = request->addBuffer(stream, buffer.get());
    if (ret < 0) {
        std::cerr << "Can't set buffer for request" << std::endl;
        return ret;
    }

    requests.push_back(std::move(request));
}
.. TODO: Controls
.. TODO: A request can also have controls or parameters that you can apply to the image.
Event handling and callbacks
----------------------------
The libcamera library uses the concept of `signals and slots`_ (similar to `Qt
Signals and Slots`_) to connect events with callbacks to handle them.
.. _signals and slots: https://libcamera.org/api-html/classlibcamera_1_1Signal.html#details
.. _Qt Signals and Slots: https://doc.qt.io/qt-6/signalsandslots.html
The ``Camera`` device emits two signals that applications can connect to in
order to execute callbacks on frame completion events.
The ``Camera::bufferCompleted`` signal notifies applications that a buffer with
image data is available. Receiving notifications about the single buffer
completion event allows applications to implement partial request completion
support, and to inspect the buffer content before the request it is part of has
fully completed.
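As a minimal sketch, assuming an application wants per-buffer notifications, a
slot could be connected to this signal as well (the handler body here is
hypothetical):

.. code:: cpp

    static void bufferComplete(Request *request, FrameBuffer *buffer)
    {
        /* Inspect the buffer before its request has fully completed. */
    }

    /* In main(), before starting the camera. */
    camera->bufferCompleted.connect(bufferComplete);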
The ``Camera::requestCompleted`` signal notifies applications that a request
has completed, which means all the buffers the request contains have now
completed. Request completion notifications are always emitted in the same order
as the requests have been queued to the camera.
To receive signal emission notifications, connect a slot function to the
signal that will handle them in the application code.
.. code:: cpp
camera->requestCompleted.connect(requestComplete);
For this example application, only the ``Camera::requestCompleted`` signal gets
handled and the matching ``requestComplete`` slot function outputs information
about the FrameBuffer to standard output. This callback is typically where an
application accesses the image data from the camera and does something with it.
Signals operate in the libcamera ``CameraManager`` thread context, so it is
important not to block the thread for a long time, as this blocks internal
processing of the camera pipelines, and can affect realtime performance.
Handle request completion events
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Create the ``requestComplete`` function by matching the slot signature:
.. code:: cpp
static void requestComplete(Request *request)
{
// Code to follow
}
Request completion events can be emitted for requests which have been canceled,
for example, by unexpected application shutdown. To avoid an application
processing invalid image data, it's worth checking that the request has
completed successfully. The list of request completion statuses is available in
the `Request::Status`_ class enum documentation.
.. _Request::Status: https://www.libcamera.org/api-html/classlibcamera_1_1Request.html#a2209ba8d51af8167b25f6e3e94d5c45b
.. code:: cpp
if (request->status() == Request::RequestCancelled)
return;
If the ``Request`` has completed successfully, applications can access the
completed buffers using the ``Request::buffers()`` function, which returns a map
of ``FrameBuffer`` instances associated with the ``Stream`` that produced the
images.
.. code:: cpp
const std::map<const Stream *, FrameBuffer *> &buffers = request->buffers();
Iterating through the map allows applications to inspect each completed buffer
in this request, and access the metadata associated with each frame.
The metadata buffer contains information such as the capture status, a timestamp,
and the bytes used, as described in the `FrameMetadata`_ documentation.
.. _FrameMetadata: https://libcamera.org/api-html/structlibcamera_1_1FrameMetadata.html
.. code:: cpp
for (auto bufferPair : buffers) {
FrameBuffer *buffer = bufferPair.second;
const FrameMetadata &metadata = buffer->metadata();
}
For this example application, inside the ``for`` loop from above, we can print
the Frame sequence number and details of the planes.
.. code:: cpp
std::cout << " seq: " << std::setw(6) << std::setfill('0') << metadata.sequence << " bytesused: ";
unsigned int nplane = 0;
for (const FrameMetadata::Plane &plane : metadata.planes()) {
    std::cout << plane.bytesused;
    if (++nplane < metadata.planes().size())
        std::cout << "/";
}
std::cout << std::endl;
The expected output shows each monotonically increasing frame sequence number
and the bytes used by planes.
.. code:: text
seq: 000000 bytesused: 1843200
seq: 000002 bytesused: 1843200
seq: 000004 bytesused: 1843200
seq: 000006 bytesused: 1843200
seq: 000008 bytesused: 1843200
seq: 000010 bytesused: 1843200
seq: 000012 bytesused: 1843200
seq: 000014 bytesused: 1843200
seq: 000016 bytesused: 1843200
seq: 000018 bytesused: 1843200
seq: 000020 bytesused: 1843200
seq: 000022 bytesused: 1843200
seq: 000024 bytesused: 1843200
seq: 000026 bytesused: 1843200
seq: 000028 bytesused: 1843200
seq: 000030 bytesused: 1843200
seq: 000032 bytesused: 1843200
seq: 000034 bytesused: 1843200
seq: 000036 bytesused: 1843200
seq: 000038 bytesused: 1843200
seq: 000040 bytesused: 1843200
seq: 000042 bytesused: 1843200
A completed buffer of course contains image data, which can be accessed through
the per-plane dma-buf file descriptor transported by the ``FrameBuffer``
instance. An example of how to write image data to disk is available in the
`FileSink class`_ which is a part of the ``cam`` utility application in the
libcamera repository.
.. _FileSink class: https://git.libcamera.org/libcamera/libcamera.git/tree/src/cam/file_sink.cpp
With the handling of this request completed, it is possible to re-use the
request and the associated buffers and re-queue it to the camera
device:
.. code:: cpp
request->reuse(Request::ReuseBuffers);
camera->queueRequest(request);
Request queueing
----------------
The ``Camera`` device is now ready to receive frame capture requests and
actually start delivering frames. In order to prepare for that, an application
needs to first start the camera, and queue requests to it for them to be
processed.
In the main() function, just after having connected the
``Camera::requestCompleted`` signal to the callback handler, start the camera
and queue all the previously created requests.
.. code:: cpp
camera->start();
for (std::unique_ptr<Request> &request : requests)
camera->queueRequest(request.get());
Event processing
~~~~~~~~~~~~~~~~
libcamera creates an internal execution thread at `CameraManager::start()`_
time to decouple its own event processing from the application's main thread.
Applications are thus free to manage their own execution as they see fit, and
only need to respond to events generated by libcamera, emitted through
signals.
.. _CameraManager::start(): https://libcamera.org/api-html/classlibcamera_1_1CameraManager.html#a49e322880a2a26013bb0076788b298c5
Real-world applications will likely either integrate with the event loop of the
framework they use, or create their own event loop to respond to user events.
For the simple application presented in this example, it is enough to prevent
immediate termination by pausing for 3 seconds. During that time, the libcamera
thread will generate request completion events that the application will handle
in the ``requestComplete()`` slot connected to the ``Camera::requestCompleted``
signal.
.. code:: cpp
std::this_thread::sleep_for(3000ms);
Clean up and stop the application
---------------------------------
The application is now finished with the camera and the resources the camera
uses, so it needs to do the following:
- stop the camera
- free the buffers in the FrameBufferAllocator and delete it
- release the lock on the camera and reset the pointer to it
- stop the camera manager
.. code:: cpp
camera->stop();
allocator->free(stream);
delete allocator;
camera->release();
camera.reset();
cm->stop();
return 0;
In this instance the CameraManager will automatically be deleted by the
unique_ptr implementation when it goes out of scope.
Build and run instructions
--------------------------
To build the application, we recommend that you use the `Meson build system`_
which is also the official build system of the libcamera library.
Make sure both ``meson`` and ``libcamera`` are installed in your system. Please
refer to your distribution documentation to install meson and install the most
recent version of libcamera from the `git repository`_. You will also need to
install the ``pkg-config`` tool so that the ``libcamera.so`` install location
on the system can be correctly identified.
.. _Meson build system: https://mesonbuild.com/
.. _git repository: https://git.libcamera.org/libcamera/libcamera.git/
Dependencies
~~~~~~~~~~~~
The test application presented here depends on the libcamera library being
available in a path that meson can identify.
performed using the ``ninja install`` command may by default deploy the
libcamera components in the ``/usr/local/lib`` path, or a package manager may
install it to ``/usr/lib`` depending on your distribution. If meson is unable to
find the location of the libcamera installation, you may need to instruct meson
to look into a specific path when searching for ``libcamera.so`` by setting the
``PKG_CONFIG_PATH`` environment variable to the right location.
Adjust the following command to use the ``pkgconfig`` directory where libcamera
has been installed in your system.
.. code:: shell
export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig/
Verify that ``pkg-config`` can identify the ``libcamera`` library with
.. code:: shell
$ pkg-config --libs --cflags libcamera
-I/usr/local/include/libcamera -L/usr/local/lib -lcamera -lcamera-base
``meson`` can alternatively use ``cmake`` to locate packages. Please refer to
the ``meson`` documentation if you prefer to use it in place of ``pkgconfig``.
Build file
~~~~~~~~~~
With the dependencies correctly identified, prepare a ``meson.build`` build file
to be placed in the same directory where the application lives. You can
name your application as you like, but be sure to update the following snippet
accordingly. In this example, the application file has been named
``simple-cam.cpp``.
.. code::
project('simple-cam', 'cpp')
simple_cam = executable('simple-cam',
'simple-cam.cpp',
dependencies: dependency('libcamera', required : true))
The ``dependencies`` line instructs meson to ask ``pkgconfig`` (or ``cmake``) to
locate the ``libcamera`` library, which the test application will be
dynamically linked against.
With the build file in place, compile and run the application with:
.. code:: shell
$ meson build
$ cd build
$ ninja
$ ./simple-cam
It is possible to increase the library debug output by using environment
variables which control the library log filtering system:
.. code:: shell
$ LIBCAMERA_LOG_LEVELS=0 ./simple-cam
@@ -0,0 +1,319 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
Developers guide to libcamera
=============================
The Linux kernel handles multimedia devices through the 'Linux media' subsystem
and provides a set of APIs (application programming interfaces) known
collectively as V4L2 (`Video for Linux 2`_) and the `Media Controller`_ API
which provide an interface to interact and control media devices.
Included in this subsystem are drivers for camera sensors, CSI2 (Camera
Serial Interface) receivers, and ISPs (Image Signal Processors).
Using these drivers to provide a functioning camera stack is a responsibility
that lies in userspace, and it is commonly implemented separately by vendors
without a common architecture or API for application developers.
libcamera provides a complete camera stack for Linux based systems to abstract
functionality desired by camera application developers and process the
configuration of hardware and image control algorithms required to obtain
desirable results from the camera.
.. _Video for Linux 2: https://www.linuxtv.org/downloads/v4l-dvb-apis-new/userspace-api/v4l/v4l2.html
.. _Media Controller: https://www.linuxtv.org/downloads/v4l-dvb-apis-new/userspace-api/mediactl/media-controller.html
In this developers guide, we will explore the `Camera Stack`_ and how it can
be visualised at a high level, and explore the internal `Architecture`_ of
the libcamera library with its components. The current `Platform Support`_ is
detailed, as well as an overview of the `Licensing`_ requirements of the
project.
This introduction is followed by a walkthrough tutorial to newcomers wishing to
support a new platform with the `Pipeline Handler Writers Guide`_ and for those
looking to make use of the libcamera native API an `Application Writers Guide`_
provides a tutorial of the key APIs exposed by libcamera.
.. _Pipeline Handler Writers Guide: pipeline-handler.html
.. _Application Writers Guide: application-developer.html
.. TODO: Correctly link to the other articles of the guide
Camera Stack
------------
The libcamera library is implemented in userspace, and makes use of underlying
kernel drivers that directly interact with hardware.
Applications can make use of libcamera through the native `libcamera API`_ or
through an adaptation layer integrating libcamera into a larger framework.
.. _libcamera API: https://www.libcamera.org/api-html/index.html
::
Application Layer
/ +--------------+ +--------------+ +--------------+ +--------------+
| | Native | | Framework | | Native | | Android |
| | V4L2 | | Application | | libcamera | | Camera |
| | Application | | (gstreamer) | | Application | | Framework |
\ +--------------+ +--------------+ +--------------+ +--------------+
^ ^ ^ ^
| | | |
| | | |
v v | v
Adaptation Layer |
/ +--------------+ +--------------+ | +--------------+
| | V4L2 | | gstreamer | | | Android |
| | Compatibility| | element | | | Camera |
| | (preload) | |(libcamerasrc)| | | HAL |
\ +--------------+ +--------------+ | +--------------+
|
^ ^ | ^
| | | |
| | | |
v v v v
libcamera Framework
/ +--------------------------------------------------------------------+
| | |
| | libcamera |
| | |
\ +--------------------------------------------------------------------+
^ ^ ^
Userspace | | |
--------------------- | ---------------- | ---------------- | ---------------
Kernel | | |
v v v
+-----------+ +-----------+ +-----------+
| Media | <--> | Video | <--> | V4L2 |
| Device | | Device | | Subdev |
+-----------+ +-----------+ +-----------+
The camera stack comprises four software layers. From bottom to top:
* The kernel drivers control the camera hardware and expose a low-level
interface to userspace through the Linux kernel V4L2 family of APIs
(Media Controller API, V4L2 Video Device API and V4L2 Subdev API).
* The libcamera framework is the core part of the stack. It handles all control
of the camera devices in its core component, libcamera, and exposes a native
C++ API to upper layers.
* The libcamera adaptation layer is an umbrella term designating the components
that interface to libcamera in other frameworks. Notable examples are the V4L2
compatibility layer, the gstreamer libcamera element, and the Android camera
HAL implementation based on libcamera, all of which are provided as part of
the libcamera project.
* The applications and upper level frameworks are based on the libcamera
framework or libcamera adaptation, and are outside of the scope of the
libcamera project; however, example native applications (cam, qcam) are
provided for testing.
V4L2 Compatibility Layer
V4L2 compatibility is achieved through a shared library that traps all
accesses to camera devices and routes them to libcamera to emulate high-level
V4L2 camera devices. It is injected in a process address space through
``LD_PRELOAD`` and is completely transparent for applications.
The compatibility layer exposes camera device features on a best-effort basis,
and aims for the level of features traditionally available from a UVC camera
designed for video conferencing.
Android Camera HAL
Camera support for Android is achieved through a generic Android camera HAL
implementation on top of libcamera. The HAL implements features required by
Android and out of scope for libcamera, such as JPEG encoding support.
This component is used to provide support for ChromeOS platforms.
GStreamer element (gstlibcamerasrc)
A `GStreamer element`_ is provided to allow capture from libcamera supported
devices through GStreamer pipelines, and connect to other elements for further
processing.
Development of this element is ongoing and is limited to a single stream.
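For example, assuming GStreamer and the element are installed, a simple test
pipeline that displays the camera feed could be launched with::

    gst-launch-1.0 libcamerasrc ! videoconvert ! autovideosink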
Native libcamera API
Applications can make use of the libcamera API directly using the C++
API. An example application and walkthrough using the libcamera API can be
followed in the `Application Writers Guide`_.
.. _GStreamer element: https://gstreamer.freedesktop.org/documentation/application-development/basics/elements.html
Architecture
------------
While offering a unified API towards upper layers, and presenting itself as a
single library, libcamera isn't monolithic. It exposes multiple components
through its public API and is built around a set of separate helpers internally.
Hardware abstractions are handled through the use of device-specific components
where required and dynamically loadable plugins are used to separate image
processing algorithms from the core libcamera codebase.
::
--------------------------< libcamera Public API >---------------------------
^ ^
| |
v v
+-------------+ +---------------------------------------------------+
| Camera | | Camera Device |
| Manager | | +-----------------------------------------------+ |
+-------------+ | | Device-Agnostic | |
^ | | | |
| | | +--------------------------+ |
| | | | ~~~~~~~~~~~~~~~~~~~~~~~ |
| | | | { +-----------------+ } |
| | | | } | //// Image //// | { |
| | | | <-> | / Processing // | } |
| | | | } | / Algorithms // | { |
| | | | { +-----------------+ } |
| | | | ~~~~~~~~~~~~~~~~~~~~~~~ |
| | | | ========================== |
| | | | +-----------------+ |
| | | | | // Pipeline /// | |
| | | | <-> | /// Handler /// | |
| | | | | /////////////// | |
| | +--------------------+ +-----------------+ |
| | Device-Specific |
| +---------------------------------------------------+
| ^ ^
| | |
v v v
+--------------------------------------------------------------------+
| Helpers and Support Classes |
| +-------------+ +-------------+ +-------------+ +-------------+ |
| | MC & V4L2 | | Buffers | | Sandboxing | | Plugins | |
| | Support | | Allocator | | IPC | | Manager | |
| +-------------+ +-------------+ +-------------+ +-------------+ |
| +-------------+ +-------------+ |
| | Pipeline | | ... | |
| | Runner | | | |
| +-------------+ +-------------+ |
+--------------------------------------------------------------------+
/// Device-Specific Components
~~~ Sandboxing
Camera Manager
The Camera Manager enumerates cameras and instantiates Pipeline Handlers to
manage each Camera that libcamera supports. The Camera Manager supports
hotplug detection and notification events when supported by the underlying
kernel devices.
There is only ever one instance of the Camera Manager running per application.
Each application's instance of the Camera Manager ensures that only a single
application can take control of a camera device at once.
Read the `Camera Manager API`_ documentation for more details.
.. _Camera Manager API: https://libcamera.org/api-html/classlibcamera_1_1CameraManager.html
Camera Device
The Camera class represents a single item of camera hardware that is capable
of producing one or more image streams, and provides the API to interact with
the underlying device.
If a system has multiple instances of the same hardware attached, each has its
own instance of the camera class.
The API exposes full control of the device to upper layers of libcamera through
the public API, making it the highest level object libcamera exposes, and the
object that all other API operations interact with from configuration to
capture.
Read the `Camera API`_ documentation for more details.
.. _Camera API: https://libcamera.org/api-html/classlibcamera_1_1Camera.html
Pipeline Handler
The Pipeline Handler manages the complex pipelines exposed by the kernel
drivers through the Media Controller and V4L2 APIs. It abstracts pipeline
handling to hide device-specific details from the rest of the library, and
implements both pipeline configuration based on stream configuration, and
pipeline runtime execution and scheduling when needed by the device.
The Pipeline Handler lives in the same process as the rest of the library, and
has access to all helpers and kernel camera-related devices.
Hardware abstraction is handled by device-specific Pipeline Handlers, which
are derived from the Pipeline Handler base class, allowing commonality to be
shared among the implementations.
Derived pipeline handlers create Camera device instances based on the devices
they detect and support on the running system, and are responsible for
managing the interactions with a camera device.
More details can be found in the `PipelineHandler API`_ documentation, and the
`Pipeline Handler Writers Guide`_.
.. _PipelineHandler API: https://libcamera.org/api-html/classlibcamera_1_1PipelineHandler.html
Image Processing Algorithms
An image processing algorithm (IPA) component is a loadable plugin that
implements 3A (Auto-Exposure, Auto-White Balance, and Auto-Focus) and other
algorithms.
The algorithms run on the CPU and interact with the camera devices through the
Pipeline Handler to control hardware image processing based on the parameters
supplied by upper layers, maintaining state and closing the control loop
of the ISP.
The component is sandboxed and can only interact with libcamera through the
API provided by the Pipeline Handler and an IPA has no direct access to kernel
camera devices.
Open source IPA modules built with libcamera can be run in the same process
space as libcamera, however external IPA modules are run in a separate process
from the main libcamera process. IPA modules have a restricted view of the
system, including no access to networking APIs and limited access to file
systems.
IPA modules are only required for platforms and devices with an ISP controlled
by the host CPU. Camera sensors which have an integrated ISP are not
controlled through the IPA module.
Platform Support
----------------
The library currently supports the following hardware platforms specifically
with dedicated pipeline handlers:
- Intel IPU3 (ipu3)
- Rockchip RK3399 (rkisp1)
- Raspberry Pi 3 and 4 (rpi/vc4)
Furthermore, generic platform support is provided for the following:
- USB video device class cameras (uvcvideo)
- iMX7, Allwinner Sun6i (simple)
- Virtual media controller driver for test use cases (vimc)
Licensing
---------
The libcamera core is covered by the `LGPL-2.1-or-later`_ license. Pipeline
Handlers are a part of the libcamera code base and need to be contributed
upstream by device vendors. IPA modules included in libcamera are covered by a
free software license, however third-parties may develop IPA modules outside of
libcamera and distribute them under a closed-source license, provided they do
not include source code from the libcamera project.
The libcamera project itself contains multiple libraries, applications and
utilities. Licenses are expressed through SPDX tags in text-based files that
support comments, and through the .reuse/dep5 file otherwise. A copy of each
license is stored in the LICENSES directory, and a full summary of the
licensing used throughout the project can be found in the COPYING.rst document.
Applications which link dynamically against libcamera and use only the public
API are an independent work of the authors and have no license restrictions
imposed upon them from libcamera.
.. _LGPL-2.1-or-later: https://spdx.org/licenses/LGPL-2.1-or-later.html
@@ -0,0 +1,531 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
IPA Writer's Guide
==================
IPA modules are Image Processing Algorithm modules. They provide functionality
that the pipeline handler can use for image processing.
This guide covers the definition of the IPA interface, and how to plumb the
connection between the pipeline handler and the IPA.
The IPA interface and protocol
------------------------------
The IPA interface defines the interface between the pipeline handler and the
IPA. Specifically, it defines the functions that the IPA exposes that the
pipeline handler can call, and the signals that the pipeline handler can
connect to, in order to receive data from the IPA asynchronously. In addition,
it contains any custom data structures that the pipeline handler and IPA may
pass to each other.
It is possible to use the same IPA interface with multiple pipeline handlers
on different hardware platforms. Generally in such cases, these platforms would
have a common hardware ISP pipeline. For instance, the rkisp1 pipeline handler
supports both the RK3399 and the i.MX8MP as they integrate the same ISP.
However, the i.MX8MP has a more complex camera pipeline, which may call for a
dedicated pipeline handler in the future. As the ISP is the same as for RK3399,
the same IPA interface could be used for both pipeline handlers. The build files
provide a mapping from pipeline handler to the IPA interface name as detailed in
:ref:`compiling-section`.
The IPA protocol refers to the agreement between the pipeline handler and the
IPA regarding the expected response(s) from the IPA for given calls to the IPA.
This protocol doesn't need to be declared anywhere in code, but it shall be
documented, as there may be multiple IPA implementations for one pipeline
handler.
As part of the design of libcamera, IPAs may be isolated in a separate process,
or run in the same process but a different thread from libcamera. The pipeline
handler and IPA shall not have to change their operation based on whether the
IPA is isolated or not, but the possibility of isolation needs to be kept in
mind. Therefore all data that is passed between them must be serializable, so
it must be defined separately in the `mojo Interface Definition Language`_
(IDL), and a code generator will generate headers and serializers corresponding
to the definitions. Every interface is defined in a mojom file and includes:
- the functions that the pipeline handler can call from the IPA
- signals in the pipeline handler that the IPA can emit
- any data structures that are to be passed between the pipeline handler and the IPA
All IPA modules of a given pipeline handler use the same IPA interface. The IPA
interface definition is thus written by the pipeline handler author, based on
how they design the interactions between the pipeline handler and the IPA.
The entire IPA interface, including the functions, signals, and any custom
structs shall be defined in a file named {interface_name}.mojom under
include/libcamera/ipa/.
.. _mojo Interface Definition Language: https://chromium.googlesource.com/chromium/src.git/+/master/mojo/public/tools/bindings/README.md
Namespacing
-----------
To avoid name collisions between data types defined by different IPA interfaces
and data types defined by libcamera, each IPA interface must be defined in its
own namespace.
The namespace is specified with mojo's module directive. It must be the first
non-comment line in the mojo data definition file. For example, the Raspberry
Pi IPA interface uses:
.. code-block:: none
module ipa.rpi;
This will become the ipa::rpi namespace in C++ code.
Data containers
---------------
Since the data passed between the pipeline handler and the IPA must support
serialization, any custom data containers must be defined with the mojo IDL.
The following list of libcamera objects are supported in the interface
definition, and may be used as function parameter types or struct field types:
- libcamera.ControlInfoMap
- libcamera.ControlList
- libcamera.FileDescriptor
- libcamera.IPABuffer
- libcamera.IPACameraSensorInfo
- libcamera.IPASettings
- libcamera.IPAStream
- libcamera.Point
- libcamera.Rectangle
- libcamera.Size
- libcamera.SizeRange
To use them, core.mojom must be included in the mojo data definition file:
.. code-block:: none
import "include/libcamera/ipa/core.mojom";
Other custom structs may be defined and used as well. There is no requirement
that they be defined before use. Enums and structs are supported.
The following is an example of a definition of an enum, for the purpose of
being used as flags:
.. code-block:: none
enum ConfigParameters {
ConfigLsTable = 0x01,
ConfigStaggeredWrite = 0x02,
ConfigSensor = 0x04,
ConfigDropFrames = 0x08,
};
The following is an example of a definition of a struct:
.. code-block:: none
struct ConfigInput {
uint32 op;
uint32 transform;
libcamera.FileDescriptor lsTableHandle;
int32 lsTableHandleStatic = -1;
map<uint32, libcamera.IPAStream> streamConfig;
array<libcamera.IPABuffer> buffers;
};
This example illustrates a few special cases. First of all, it uses the
FileDescriptor data type. This type must be used to ensure that the file
descriptor that it contains is translated properly across the IPC boundary
(when the IPA is in an isolated process).
This does mean that if the file descriptor should be sent without being
translated (for example, for the IPA to tell the pipeline handler which
fd *that the pipeline handler holds* to act on), then it must be in a
regular int32 type.
This example also illustrates that struct fields may have default values, as
is assigned to lsTableHandleStatic. This is the value that the field will
take when the struct is constructed with the default constructor.
Arrays and maps are supported as well. They are translated to C++ vectors and
maps, respectively. The members of the arrays and maps are embedded, and cannot
be const.
Note that nullable fields, static-length arrays, handles, and unions, which
are supported by mojo, are not supported by our code generator.
The Main IPA interface
----------------------
The IPA interface is split in two parts, the Main IPA interface, which
describes the functions that the pipeline handler can call from the IPA,
and the Event IPA interface, which describes the signals received by the
pipeline handler that the IPA can emit. Both must be defined. This section
focuses on the Main IPA interface.
The main interface must be named IPA{interface_name}Interface.
The functions that the pipeline handler can call from the IPA may be
synchronous or asynchronous. Synchronous functions do not return until the IPA
returns from the function, while asynchronous functions return immediately
without waiting for the IPA to return.
At a minimum, the following three functions must be present (and implemented):
- init();
- start();
- stop();
All three of these functions are synchronous. The parameters for start() and
init() may be customized.
init() initializes the IPA interface. It shall be called before any other
function of the IPAInterface.
stop() informs the IPA module that the camera is stopped. The IPA module shall
release resources prepared in start().
A configure() function is recommended. Any ControlInfoMap instances that will be
used by the IPA must be sent to the IPA from the pipeline handler at configure
time, for example.
All input parameters will become const references, except for arithmetic types,
which will be passed by value. Output parameters will become pointers, unless
the first output parameter is an int32, or there is only one primitive output
parameter, in which case it will become a regular return value.
const is not allowed inside of arrays and maps. mojo arrays will become C++
std::vector<>.
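To illustrate these rules with a hypothetical function, a mojom definition
such as ``foo(uint32 id, SensorConfig config) => (int32 ret, uint32 value);``
would map to a C++ method along the lines of:

.. code-block:: C++

    /* Sketch only: 'ret' becomes the return value since it is a leading
     * int32, 'id' is arithmetic and passed by value, 'config' becomes a
     * const reference, and 'value' becomes a pointer output parameter. */
    int32_t foo(uint32_t id, const SensorConfig &config, uint32_t *value);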
By default, all functions defined in the main interface are synchronous. This
means that in the case of IPC (i.e. isolated IPA), the function call will not
return until the return value or output parameters are ready. To specify an
asynchronous function, the [async] attribute can be used. Asynchronous
functions must not have any return value or output parameters, since in the
case of IPC the call needs to return immediately.
It is also possible that the IPA will not be run in isolation. In this case,
the IPA thread will not exist until start() is called. This means that in the
case of no isolation, asynchronous calls cannot be made before start(). Since
the IPA interface must be the same regardless of isolation, the same
restriction applies to the case of isolation, and any function that will be
called before start() must be synchronous.
In addition, any call made after start() and before stop() must be
asynchronous. The motivation for this is to avoid damaging real-time
performance of the pipeline handler. If the pipeline handler wants some data
from the IPA, the IPA should return the data asynchronously via an event
(see "The Event IPA interface").
The following is an example of a main interface definition:
.. code-block:: none
interface IPARPiInterface {
init(libcamera.IPASettings settings, string sensorName)
=> (int32 ret, bool metadataSupport);
start() => (int32 ret);
stop();
configure(libcamera.IPACameraSensorInfo sensorInfo,
map<uint32, libcamera.IPAStream> streamConfig,
map<uint32, libcamera.ControlInfoMap> entityControls,
ConfigInput ipaConfig)
=> (int32 ret, ConfigOutput results);
mapBuffers(array<IPABuffer> buffers);
unmapBuffers(array<uint32> ids);
[async] signalStatReady(uint32 bufferId);
[async] signalQueueRequest(libcamera.ControlList controls);
[async] signalIspPrepare(ISPConfig data);
};
The first three functions are the required functions. Functions do not need to
have return values, like stop(), mapBuffers(), and unmapBuffers(). In the case
of asynchronous functions, as explained before, they *must not* have return
values.
The Event IPA interface
-----------------------
The event IPA interface describes the signals received by the pipeline handler
that the IPA can emit. It must be defined. If there are no event functions,
then it may be empty. These emissions are meant to notify the pipeline handler
of some event, such as request data is ready, and *must not* be used to drive
the camera pipeline from the IPA.
The event interface must be named IPA{interface_name}EventInterface.
Functions defined in the event interface are implicitly asynchronous.
Thus they cannot return any value. Specifying the [async] tag is not
necessary.
Functions defined in the event interface will become signals in the IPA
interface. The IPA can emit signals, while the pipeline handler can connect
slots to them.
The following is an example of an event interface definition:
.. code-block:: none
interface IPARPiEventInterface {
statsMetadataComplete(uint32 bufferId,
libcamera.ControlList controls);
runIsp(uint32 bufferId);
embeddedComplete(uint32 bufferId);
setIsp(libcamera.ControlList controls);
setStaggered(libcamera.ControlList controls);
};
.. _compiling-section:
Compiling the IPA interface
---------------------------
After the IPA interface is defined in include/libcamera/ipa/{interface_name}.mojom,
an entry for it must be added in meson so that it can be compiled. The filename
must be added to the pipeline_ipa_mojom_mapping variable in
include/libcamera/ipa/meson.build. This variable maps the pipeline handler name
to its IPA interface file.
For example, adding the raspberrypi.mojom file to meson:
.. code-block:: none
pipeline_ipa_mojom_mapping = {
    'rpi/vc4': 'raspberrypi.mojom',
}
This will cause the mojo data definition file to be compiled. Specifically, it
generates five files:
- a header describing the custom data structures, and the complete IPA
interface (at {$build_dir}/include/libcamera/ipa/{interface}_ipa_interface.h)
- a serializer implementing de/serialization for the custom data structures (at
{$build_dir}/include/libcamera/ipa/{interface}_ipa_serializer.h)
- a proxy header describing a specialized IPA proxy (at
{$build_dir}/include/libcamera/ipa/{interface}_ipa_proxy.h)
- a proxy source implementing the IPA proxy (at
{$build_dir}/src/libcamera/proxy/{interface}_ipa_proxy.cpp)
- a proxy worker source implementing the other end of the IPA proxy (at
{$build_dir}/src/libcamera/proxy/worker/{interface}_ipa_proxy_worker.cpp)
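For the raspberrypi.mojom example above, this results in the following
generated files (paths relative to the build directory):

.. code-block:: none

    include/libcamera/ipa/raspberrypi_ipa_interface.h
    include/libcamera/ipa/raspberrypi_ipa_serializer.h
    include/libcamera/ipa/raspberrypi_ipa_proxy.h
    src/libcamera/proxy/raspberrypi_ipa_proxy.cpp
    src/libcamera/proxy/worker/raspberrypi_ipa_proxy_worker.cpp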
The IPA proxy serves as the layer between the pipeline handler and the IPA, and
handles threading vs isolation transparently. The pipeline handler and the IPA
only require the interface header and the proxy header. The serializer is only
used internally by the proxy.
Using the custom data structures
--------------------------------
To use the custom data structures that are defined in the mojo data definition
file, the following header must be included:
.. code-block:: C++
#include <libcamera/ipa/{interface_name}_ipa_interface.h>
The POD types of the structs simply become their C++ counterparts, e.g. uint32
in mojo will become uint32_t in C++. mojo map becomes C++ std::map, and mojo
array becomes C++ std::vector. All members of maps and vectors are embedded,
and are not pointers. The members cannot be const.
The names of all the fields of structs can be used in C++ in exactly the same
way as they are defined in the data definition file. For example, the following
struct as defined in the mojo file:
.. code-block:: none
struct SensorConfig {
uint32 gainDelay = 1;
uint32 exposureDelay;
uint32 sensorMetadata;
};
Will become this in C++:
.. code-block:: C++
struct SensorConfig {
uint32_t gainDelay;
uint32_t exposureDelay;
uint32_t sensorMetadata;
};
The generated structs will also have two constructors, a constructor that
fills all fields with the default values, and a second constructor that takes
a value for every field. The default value constructor will fill in the fields
with the specified default value if it exists. In the above example,
``gainDelay`` will be initialized to 1. If no default value is specified, then it will be
filled in as zero (or -1 for a FileDescriptor type).
All fields and constructors/destructors in these generated structs are public.
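As a short sketch of the two generated constructors, based on the
``SensorConfig`` example above (the explicit values are arbitrary):

.. code-block:: C++

    /* Default constructor: gainDelay takes its default value of 1, the
     * other fields are filled in as zero. */
    SensorConfig defaultConfig;

    /* Field-by-field constructor: every field is given explicitly. */
    SensorConfig explicitConfig(2, 30000, 1);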
Using the IPA interface (pipeline handler)
------------------------------------------
The following headers are necessary to use an IPA in the pipeline handler
(with raspberrypi as an example):
.. code-block:: C++
#include <libcamera/ipa/raspberrypi_ipa_interface.h>
#include <libcamera/ipa/raspberrypi_ipa_proxy.h>
The first header includes definitions of the custom data structures, and
the definition of the complete IPA interface (including both the Main and
the Event IPA interfaces). The name of the header file comes from the name
of the mojom file, which in this case was raspberrypi.mojom.
The second header includes the definition of the specialized IPA proxy. It
exposes the complete IPA interface. We will see how to use it in this section.
In the pipeline handler, we first need to construct a specialized IPA proxy.
From the point of view of the pipeline handler, this object is the IPA.
To do so, we invoke the IPAManager:
.. code-block:: C++
std::unique_ptr<ipa::rpi::IPAProxyRPi> ipa_ =
IPAManager::createIPA<ipa::rpi::IPAProxyRPi>(pipe_, 1, 1);
The ipa::rpi namespace comes from the namespace that we defined in the mojo
data definition file, in the "Namespacing" section. The name of the proxy,
IPAProxyRPi, comes from the name given to the main IPA interface,
IPARPiInterface, in the "The Main IPA interface" section.
The return value of IPAManager::createIPA shall be error-checked, to confirm
that the returned pointer is not null.
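A minimal sketch of such a check (the log category and the error code here
are illustrative, not mandated):
.. code-block:: C++
   if (!ipa_) {
       LOG(RPI, Error) << "Failed to create an IPA proxy";
       return -EINVAL;
   }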
After this, before initializing the IPA, slots should be connected to all of
the IPA's signals, as defined in the Event IPA interface:
.. code-block:: C++
ipa_->statsMetadataComplete.connect(this, &RPiCameraData::statsMetadataComplete);
ipa_->runIsp.connect(this, &RPiCameraData::runIsp);
ipa_->embeddedComplete.connect(this, &RPiCameraData::embeddedComplete);
ipa_->setIsp.connect(this, &RPiCameraData::setIsp);
ipa_->setStaggered.connect(this, &RPiCameraData::setStaggered);
The slot functions have a function signature based on the function definition
in the Event IPA interface. All plain old data (POD) types are passed as-is
(with their C++ versions, e.g. uint32 -> uint32_t), and all structs are passed
by const reference.
For example, for the following entry in the Event IPA interface:
.. code-block:: none
statsMetadataComplete(uint32 bufferId, ControlList controls);
A function with the following function signature shall be connected to the
signal:
.. code-block:: C++
void statsMetadataComplete(uint32_t bufferId, const ControlList &controls);
After connecting the slots to the signals, the IPA should be initialized
(using the main interface definition example from earlier):
.. code-block:: C++
IPASettings settings{};
bool metadataSupport;
int ret = ipa_->init(settings, "sensor name", &metadataSupport);
At this point, any IPA functions that were defined in the Main IPA interface
can be called as if they were regular member functions, for example (based on
the main interface definition example from earlier):
.. code-block:: C++
ipa_->start();
int ret = ipa_->configure(sensorInfo_, streamConfig, entityControls, ipaConfig, &result);
ipa_->signalStatReady(RPi::BufferMask::STATS | static_cast<unsigned int>(index));
Remember that any functions designated as asynchronous *must not* be called
before start().
Notice that for both init() and configure(), the first output parameter is
turned into a direct return value, since it is an int32, while the remaining
output parameters are returned through pointers.
Using the IPA interface (IPA Module)
------------------------------------
The following header is necessary to implement an IPA Module (with raspberrypi
as an example):
.. code-block:: C++
#include <libcamera/ipa/raspberrypi_ipa_interface.h>
This header includes definitions of the custom data structures, and
the definition of the complete IPA interface (including both the Main and
the Event IPA interfaces). The name of the header file comes from the name
of the mojom file, which in this case was raspberrypi.mojom.
The IPA module must implement the IPA interface class that is defined in the
header. In the case of our example, that is ipa::rpi::IPARPiInterface. The
ipa::rpi namespace comes from the namespace that we defined in the mojo data
definition file, in the "Namespacing" section. The name of the interface is the
same as the name given to the Main IPA interface.
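A skeleton of such an implementation might look like the following sketch,
where the class name IPARPi is illustrative and the member functions shown
are assumed from the interface definition examples used earlier:
.. code-block:: C++
   namespace libcamera {
   namespace ipa {
   namespace rpi {
   class IPARPi : public IPARPiInterface
   {
   public:
       int init(const IPASettings &settings, const std::string &sensorName,
                bool *metadataSupport) override;
       void start() override;
       void stop() override;
       /* ... remaining Main IPA interface functions ... */
   };
   } /* namespace rpi */
   } /* namespace ipa */
   } /* namespace libcamera */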
The function signature rules are the same as for the slots on the pipeline
handler side; PODs are passed by value, and structs are passed by const
reference. For the Main IPA interface, output values are also allowed (only
for synchronous calls), so there may be output parameters as well. If the
first output parameter is a POD it will be returned by value; otherwise it
will be returned through an output parameter pointer. The second and any
further output parameters will also be returned through output parameter
pointers.
For example, for the following function specification in the Main IPA interface
definition:
.. code-block:: none
configure(libcamera.IPACameraSensorInfo sensorInfo,
uint32 exampleNumber,
map<uint32, libcamera.IPAStream> streamConfig,
map<uint32, libcamera.ControlInfoMap> entityControls,
ConfigInput ipaConfig)
=> (int32 ret, ConfigOutput results);
We will need to implement a function with the following function signature:
.. code-block:: C++
int configure(const IPACameraSensorInfo &sensorInfo,
              uint32_t exampleNumber,
              const std::map<unsigned int, IPAStream> &streamConfig,
              const std::map<unsigned int, ControlInfoMap> &entityControls,
              const ipa::rpi::ConfigInput &ipaConfig,
              ipa::rpi::ConfigOutput *results);
The return value is int, because the first output parameter is an int32. The
remaining output parameters (in this case, only results) become output
parameter pointers. The non-POD input parameters become const references, and
the POD input parameter is passed by value.
At any time after start() and before stop() (though usually only in response to
an IPA call), the IPA may send data to the pipeline handler by emitting
signals. These signals are defined in the C++ IPA interface class (which is in
the generated and included header).
For example, for the following function defined in the Event IPA interface:
.. code-block:: none
statsMetadataComplete(uint32 bufferId, libcamera.ControlList controls);
We can emit a signal like so:
.. code-block:: C++
statsMetadataComplete.emit(bufferId & RPi::BufferMask::ID, libcameraMetadata_);
File diff suppressed because it is too large

View File

@@ -0,0 +1,147 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
Tracing Guide
=============
Guide to tracing in libcamera.
Profiling vs Tracing
--------------------
Tracing is the process of recording timestamps at specific locations in the
code. libcamera provides such a tracing facility; this guide shows how to use
it.
Tracing should not be confused with profiling, which samples execution
at periodic points in time. This can be done with other tools such as
callgrind, perf, gprof, etc., without modification to the application,
and is out of scope for this guide.
Compiling
---------
To compile libcamera with tracing support, it must be enabled through the
meson ``tracing`` option. It depends on the lttng-ust library (available in the
``liblttng-ust-dev`` package for Debian-based distributions).
The ``tracing`` option in meson defaults to ``auto``, so tracing is enabled
automatically when liblttng is detected. Conversely, if the option is set to
disabled, libcamera will be compiled without tracing support.
Defining tracepoints
--------------------
libcamera already contains a set of tracepoints. To define additional
tracepoints, create a file
``include/libcamera/internal/tracepoints/{file}.tp``, where ``file`` is a
reasonable name related to the category of tracepoints that you wish to
define. For example, the tracepoints file for the Request object is called
``request.tp``. An entry for this file must be added in
``include/libcamera/internal/tracepoints/meson.build``.
In this tracepoints file, define your tracepoints `as mandated by lttng
<https://lttng.org/man/3/lttng-ust>`_. The header boilerplate must *not* be
included (as it will conflict with the rest of our infrastructure), and
only the tracepoint definitions (with the ``TRACEPOINT_*`` macros) should be
included.
All tracepoint providers shall be ``libcamera``. According to lttng, the
tracepoint provider should be per-project; this is the rationale for this
decision. To group tracepoint events, we recommend using
``{class_name}_{tracepoint_name}``, for example, ``request_construct`` for a
tracepoint for the constructor of the Request class.
Tracepoint arguments may take pointers to C++ objects, in which case the
usual C++ namespacing rules apply. The header that contains the necessary
class definitions must be included at the top of the tracepoint provider file.
Note: the final parameter in ``TP_ARGS`` *must not* have a trailing comma, and
the parameters to ``TP_FIELDS`` are *space-separated*. Not following these will
cause compilation errors.
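Putting this together, a minimal tracepoint definition for the
``request_construct`` event suggested above could look like the following
sketch, built on the lttng-ust ``TRACEPOINT_EVENT`` macro (the recorded
fields are illustrative):
.. code-block:: C++
   #include "libcamera/internal/request.h"
   TRACEPOINT_EVENT(
       libcamera,
       request_construct,
       TP_ARGS(
           libcamera::Request *, req
       ),
       TP_FIELDS(
           ctf_integer_hex(uintptr_t, request, reinterpret_cast<uintptr_t>(req))
           ctf_integer(uint64_t, cookie, req->cookie())
       )
   )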
Using tracepoints (in libcamera)
--------------------------------
To use tracepoints in libcamera, first the header needs to be included:
``#include "libcamera/internal/tracepoints.h"``
Then to use the tracepoint:
``LIBCAMERA_TRACEPOINT({tracepoint_event}, args...)``
This macro must be used, as opposed to lttng's macros directly, because
lttng is an optional dependency of libcamera, so the code must compile and run
even when lttng is not present or when tracing is disabled.
The tracepoint provider name, as declared in the tracepoint definition, is not
included in the parameters of the tracepoint.
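For example, a sketch of emitting the ``request_construct`` tracepoint from
the Request constructor (assuming the definition sketched earlier):
.. code-block:: C++
   #include "libcamera/internal/tracepoints.h"
   Request::Request(Camera *camera, uint64_t cookie)
   {
       /* ... normal construction ... */
       LIBCAMERA_TRACEPOINT(request_construct, this);
   }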
There are also two special tracepoints available for tracing IPA calls:
``LIBCAMERA_TRACEPOINT_IPA_BEGIN({pipeline_name}, {ipa_function})``
``LIBCAMERA_TRACEPOINT_IPA_END({pipeline_name}, {ipa_function})``
These shall be placed where an IPA function is called from the pipeline handler,
and when the pipeline handler receives the corresponding response from the IPA,
respectively. These are the tracepoints that our sample analysis script
(see "Analyzing a trace") scans for when computing statistics on IPA call time.
Using tracepoints (from an application)
---------------------------------------
As applications are not part of libcamera, but rather users of libcamera,
applications should seek their own tracing mechanisms. For ease of tracing
the application alongside tracing libcamera, it is recommended to also
`use lttng <https://lttng.org/docs/#doc-tracing-your-own-user-application>`_.
Using tracepoints (from closed-source IPA)
------------------------------------------
Similar to applications, closed-source IPAs can simply use lttng on their own,
or any other tracing mechanism if desired.
Collecting a trace
------------------
A trace can be collected fairly simply from lttng:
.. code-block:: bash
lttng create $SESSION_NAME
lttng enable-event -u libcamera:\*
lttng start
# run libcamera application
lttng stop
lttng view
lttng destroy $SESSION_NAME
See the `lttng documentation <https://lttng.org/docs/>`_ for further details.
The location of the trace file is printed when running
``lttng create $SESSION_NAME``. After destroying the session, it can still be
viewed by: ``lttng view -t $PATH_TO_TRACE``, where ``$PATH_TO_TRACE`` is the
path that was printed when the session was created. This is the same path that
is used when analyzing traces programmatically, as described in the next section.
Analyzing a trace
-----------------
As mentioned above, while an lttng tracing session exists and tracing is not
running, the trace output can be viewed as text by running ``lttng view``.
The trace log can also be viewed as text using babeltrace2. See the
`lttng trace analysis documentation
<https://lttng.org/docs/#doc-viewing-and-analyzing-your-traces-bt>`_
for further details.
babeltrace2 also has a C API and Python bindings that can be used to process
traces. See the
`babeltrace2 Python bindings documentation <https://babeltrace.org/docs/v2.0/python/bt2/>`_
and the
`babeltrace2 C API documentation <https://babeltrace.org/docs/v2.0/libbabeltrace2/>`_
for more details.
As an example, there is a script ``utils/tracepoints/analyze-ipa-trace.py``
that gathers statistics for the time taken for an IPA function call, by
measuring the time difference between pairs of events
``libcamera:ipa_call_start`` and ``libcamera:ipa_call_finish``.
View File

@@ -0,0 +1,132 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
width="297mm"
height="210mm"
viewBox="0 0 297 210"
version="1.1"
id="svg1"
inkscape:version="1.3 (0e150ed6c4, 2023-07-21)"
sodipodi:docname="rotate0.svg"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview1"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:document-units="mm"
showgrid="true"
showguides="false"
inkscape:zoom="1.4854147"
inkscape:cx="666.48052"
inkscape:cy="448.35962"
inkscape:window-width="1916"
inkscape:window-height="1040"
inkscape:window-x="0"
inkscape:window-y="38"
inkscape:window-maximized="1"
inkscape:current-layer="layer1">
<inkscape:grid
id="grid1"
units="px"
originx="0"
originy="0"
spacingx="0.26458334"
spacingy="0.26458333"
empcolor="#0000ff"
empopacity="0.25098039"
color="#0000ff"
opacity="0.1254902"
empspacing="5"
dotted="false"
gridanglex="30"
gridanglez="30"
visible="true" />
</sodipodi:namedview>
<defs
id="defs1" />
<g
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;paint-order:markers stroke fill;stroke-dasharray:none"
id="rect1"
width="152.88184"
height="119.41136"
x="77.237244"
y="81.982094" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;paint-order:markers stroke fill;stroke-dasharray:none"
id="rect2"
width="49.755535"
height="36.468258"
x="92.612343"
y="98.912964" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2-5"
width="49.755535"
height="36.468258"
x="167.25099"
y="98.912964" />
<g
id="g4"
transform="translate(-0.98582077)"
style="stroke-width:1.5875;stroke-dasharray:none">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect3"
width="40.994682"
height="43.605846"
x="134.16664"
y="157.24184" />
<ellipse
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="path3"
cx="140.15703"
cy="176.44627"
rx="1.889045"
ry="1.925626" />
</g>
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="M 244.95942,81.765726 62.444825,81.97209 154.25639,28.65633 Z"
id="path4"
sodipodi:nodetypes="cccc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;stroke-dasharray:none"
d="m 199.76751,33.368887 0.0285,21.581353"
id="path5" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;stroke-dasharray:none"
d="m 215.59016,33.189206 0.0959,31.330304"
id="path6" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 194.42835,33.189356 25.2821,-0.220612"
id="path7" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;stroke-dasharray:none"
d="m 195.19248,33.096339 -0.0701,-5.375793 23.77787,-0.05613 0.0553,5.315811"
id="path8" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 194.20874,25.616264 25.25485,-0.02536"
id="path7-5"
sodipodi:nodetypes="cc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 195.03436,26.298566 -0.0455,-5.426692 23.77787,-0.05613 0.0553,5.315811"
id="path8-9"
sodipodi:nodetypes="cccc" />
</g>
</svg>
After

Width: | Height: | Size: 4.8 KiB

View File

@@ -0,0 +1,135 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
width="297mm"
height="210mm"
viewBox="0 0 297 210"
version="1.1"
id="svg1"
inkscape:version="1.3 (0e150ed6c4, 2023-07-21)"
sodipodi:docname="rotate0Mirror.svg"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview1"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:document-units="mm"
showgrid="true"
showguides="false"
inkscape:zoom="0.82900578"
inkscape:cx="599.51331"
inkscape:cy="579.00682"
inkscape:window-width="1916"
inkscape:window-height="1040"
inkscape:window-x="0"
inkscape:window-y="38"
inkscape:window-maximized="1"
inkscape:current-layer="layer1">
<inkscape:grid
id="grid1"
units="px"
originx="0"
originy="0"
spacingx="0.26458334"
spacingy="0.26458333"
empcolor="#0000ff"
empopacity="0.25098039"
color="#0000ff"
opacity="0.1254902"
empspacing="5"
dotted="false"
gridanglex="30"
gridanglez="30"
visible="true" />
</sodipodi:namedview>
<defs
id="defs1" />
<g
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect1"
width="152.88184"
height="119.41136"
x="-230.13463"
y="81.982094"
transform="scale(-1,1)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2"
width="49.755535"
height="36.468258"
x="-214.75954"
y="98.912964"
transform="scale(-1,1)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2-5"
width="49.755535"
height="36.468258"
x="-140.12088"
y="98.912964"
transform="scale(-1,1)" />
<g
id="g4"
transform="matrix(-1,0,0,1,308.35769,0)"
style="stroke-width:1.5875;stroke-dasharray:none">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect3"
width="40.994682"
height="43.605846"
x="134.16664"
y="157.24184" />
<ellipse
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="path3"
cx="140.15703"
cy="176.44627"
rx="1.889045"
ry="1.925626" />
</g>
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="M 62.412454,81.765726 244.92705,81.97209 153.11548,28.65633 Z"
id="path4"
sodipodi:nodetypes="cccc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 107.60436,33.368887 -0.0285,21.581353"
id="path5" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 91.781714,33.189206 -0.0959,31.330304"
id="path6" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="M 112.94352,33.189356 87.661424,32.968744"
id="path7" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 112.17939,33.096339 0.0701,-5.375793 -23.777866,-0.05613 -0.0553,5.315811"
id="path8" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="M 113.16313,25.616264 87.908284,25.590904"
id="path7-5"
sodipodi:nodetypes="cc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 112.33751,26.298566 0.0455,-5.426692 -23.777866,-0.05613 -0.0553,5.315811"
id="path8-9"
sodipodi:nodetypes="cccc" />
</g>
</svg>
After

Width: | Height: | Size: 4.9 KiB

View File

@@ -0,0 +1,135 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
width="297mm"
height="210mm"
viewBox="0 0 297 210"
version="1.1"
id="svg1"
inkscape:version="1.3 (0e150ed6c4, 2023-07-21)"
sodipodi:docname="rotate180.svg"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview1"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:document-units="mm"
showgrid="true"
showguides="false"
inkscape:zoom="0.94272086"
inkscape:cx="467.79489"
inkscape:cy="423.24299"
inkscape:window-width="1916"
inkscape:window-height="1040"
inkscape:window-x="0"
inkscape:window-y="38"
inkscape:window-maximized="1"
inkscape:current-layer="layer1">
<inkscape:grid
id="grid1"
units="px"
originx="0"
originy="0"
spacingx="0.26458334"
spacingy="0.26458333"
empcolor="#0000ff"
empopacity="0.25098039"
color="#0000ff"
opacity="0.1254902"
empspacing="5"
dotted="false"
gridanglex="30"
gridanglez="30"
visible="true" />
</sodipodi:namedview>
<defs
id="defs1" />
<g
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect1"
width="152.88184"
height="119.41136"
x="-230.13461"
y="-140.22527"
transform="scale(-1)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2"
width="49.755535"
height="36.468258"
x="-214.75951"
y="-123.2944"
transform="scale(-1)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2-5"
width="49.755535"
height="36.468258"
x="-140.12086"
y="-123.2944"
transform="scale(-1)" />
<g
id="g4"
transform="rotate(180,154.17884,111.10368)"
style="stroke-width:1.5875;stroke-dasharray:none">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect3"
width="40.994682"
height="43.605846"
x="134.16664"
y="157.24184" />
<ellipse
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="path3"
cx="140.15703"
cy="176.44627"
rx="1.889045"
ry="1.925626" />
</g>
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 62.412437,140.44163 182.514593,-0.20636 -91.81156,53.31576 z"
id="path4"
sodipodi:nodetypes="cccc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 107.60435,188.83847 -0.0285,-21.58135"
id="path5" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 91.781697,189.01815 -0.0959,-31.3303"
id="path6" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 112.94351,189.018 -25.282103,0.22061"
id="path7" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 112.17938,189.11102 0.0701,5.37579 -23.777873,0.0561 -0.0553,-5.31581"
id="path8" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 113.16312,196.59109 -25.254853,0.0254"
id="path7-5"
sodipodi:nodetypes="cc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 112.3375,195.90879 0.0455,5.42669 -23.777873,0.0561 -0.0553,-5.31581"
id="path8-9"
sodipodi:nodetypes="cccc" />
</g>
</svg>
After

Width: | Height: | Size: 4.9 KiB

View File

@@ -0,0 +1,135 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
width="297mm"
height="210mm"
viewBox="0 0 297 210"
version="1.1"
id="svg1"
inkscape:version="1.3 (0e150ed6c4, 2023-07-21)"
sodipodi:docname="rotate180Mirror.svg"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview1"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:document-units="mm"
showgrid="true"
showguides="false"
inkscape:zoom="0.94272086"
inkscape:cx="467.79489"
inkscape:cy="423.24299"
inkscape:window-width="1916"
inkscape:window-height="1040"
inkscape:window-x="0"
inkscape:window-y="38"
inkscape:window-maximized="1"
inkscape:current-layer="layer1">
<inkscape:grid
id="grid1"
units="px"
originx="0"
originy="0"
spacingx="0.26458334"
spacingy="0.26458333"
empcolor="#0000ff"
empopacity="0.25098039"
color="#0000ff"
opacity="0.1254902"
empspacing="5"
dotted="false"
gridanglex="30"
gridanglez="30"
visible="true" />
</sodipodi:namedview>
<defs
id="defs1" />
<g
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect1"
width="152.88184"
height="119.41136"
x="77.237228"
y="-140.22527"
transform="scale(1,-1)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2"
width="49.755535"
height="36.468258"
x="92.612335"
y="-123.2944"
transform="scale(1,-1)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2-5"
width="49.755535"
height="36.468258"
x="167.25098"
y="-123.2944"
transform="scale(1,-1)" />
<g
id="g4"
transform="matrix(1,0,0,-1,-0.98584226,222.20736)"
style="stroke-width:1.5875;stroke-dasharray:none">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect3"
width="40.994682"
height="43.605846"
x="134.16664"
y="157.24184" />
<ellipse
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="path3"
cx="140.15703"
cy="176.44627"
rx="1.889045"
ry="1.925626" />
</g>
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="M 244.9594,140.44163 62.444808,140.23527 154.25637,193.55103 Z"
id="path4"
sodipodi:nodetypes="cccc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 199.76749,188.83847 0.0285,-21.58135"
id="path5" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 215.59014,189.01815 0.0959,-31.3303"
id="path6" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 194.42833,189.018 25.2821,0.22061"
id="path7" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 195.19246,189.11102 -0.0701,5.37579 23.77787,0.0561 0.0553,-5.31581"
id="path8" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 194.20872,196.59109 25.25485,0.0254"
id="path7-5"
sodipodi:nodetypes="cc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 195.03434,195.90879 -0.0455,5.42669 23.77787,0.0561 0.0553,-5.31581"
id="path8-9"
sodipodi:nodetypes="cccc" />
</g>
</svg>
After

Width: | Height: | Size: 4.9 KiB

View File

@@ -0,0 +1,135 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
width="297mm"
height="210mm"
viewBox="0 0 297 210"
version="1.1"
id="svg1"
inkscape:version="1.3 (0e150ed6c4, 2023-07-21)"
sodipodi:docname="rotate270.svg"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview1"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:document-units="mm"
showgrid="true"
showguides="false"
inkscape:zoom="0.94272086"
inkscape:cx="467.26451"
inkscape:cy="423.24299"
inkscape:window-width="1916"
inkscape:window-height="1040"
inkscape:window-x="0"
inkscape:window-y="38"
inkscape:window-maximized="1"
inkscape:current-layer="layer1">
<inkscape:grid
id="grid1"
units="px"
originx="0"
originy="0"
spacingx="0.26458334"
spacingy="0.26458333"
empcolor="#0000ff"
empopacity="0.25098039"
color="#0000ff"
opacity="0.1254902"
empspacing="5"
dotted="false"
gridanglex="30"
gridanglez="30"
visible="true" />
</sodipodi:namedview>
<defs
id="defs1" />
<g
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect1"
width="152.88184"
height="119.41136"
x="-187.55237"
y="124.56432"
transform="rotate(-90)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2"
width="49.755535"
height="36.468258"
x="-172.17726"
y="141.49518"
transform="rotate(-90)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2-5"
width="49.755535"
height="36.468258"
x="-97.538612"
y="141.49518"
transform="rotate(-90)" />
<g
id="g4"
transform="rotate(-90,154.17883,111.5966)"
style="stroke-width:1.5875;stroke-dasharray:none">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect3"
width="40.994682"
height="43.605846"
x="134.16664"
y="157.24184" />
<ellipse
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="path3"
cx="140.15703"
cy="176.44627"
rx="1.889045"
ry="1.925626" />
</g>
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="M 124.34796,19.830188 124.55432,202.34478 71.238559,110.53322 Z"
id="path4"
sodipodi:nodetypes="cccc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 75.951119,65.022101 21.58135,-0.0285"
id="path5" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 75.771439,49.199448 31.330301,-0.0959"
id="path6" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="M 75.771589,70.361261 75.550979,45.079158"
id="path7" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 75.678569,69.597131 -5.37579,0.0701 -0.0561,-23.777873 5.31581,-0.0553"
id="path8" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 68.198499,70.580871 -0.0254,-25.254853"
id="path7-5"
sodipodi:nodetypes="cc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 68.880799,69.755251 -5.42669,0.0455 -0.0561,-23.777873 5.31581,-0.0553"
id="path8-9"
sodipodi:nodetypes="cccc" />
</g>
</svg>
After

Width: | Height: | Size: 4.9 KiB

View File

@@ -0,0 +1,135 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
width="297mm"
height="210mm"
viewBox="0 0 297 210"
version="1.1"
id="svg1"
inkscape:version="1.3 (0e150ed6c4, 2023-07-21)"
sodipodi:docname="rotate270Mirror.svg"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview1"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:document-units="mm"
showgrid="true"
showguides="false"
inkscape:zoom="0.94272086"
inkscape:cx="467.79489"
inkscape:cy="423.24299"
inkscape:window-width="1916"
inkscape:window-height="1040"
inkscape:window-x="0"
inkscape:window-y="38"
inkscape:window-maximized="1"
inkscape:current-layer="layer1">
<inkscape:grid
id="grid1"
units="px"
originx="0"
originy="0"
spacingx="0.26458334"
spacingy="0.26458333"
empcolor="#0000ff"
empopacity="0.25098039"
color="#0000ff"
opacity="0.1254902"
empspacing="5"
dotted="false"
gridanglex="30"
gridanglez="30"
visible="true" />
</sodipodi:namedview>
<defs
id="defs1" />
<g
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect1"
width="152.88184"
height="119.41136"
x="-187.55237"
y="-182.80751"
transform="matrix(0,-1,-1,0,0,0)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2"
width="49.755535"
height="36.468258"
x="-172.17726"
y="-165.87666"
transform="matrix(0,-1,-1,0,0,0)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2-5"
width="49.755535"
height="36.468258"
x="-97.538612"
y="-165.87666"
transform="matrix(0,-1,-1,0,0,0)" />
<g
id="g4"
transform="matrix(0,-1,-1,0,264.78961,265.77543)"
style="stroke-width:1.5875;stroke-dasharray:none">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect3"
width="40.994682"
height="43.605846"
x="134.16664"
y="157.24184" />
<ellipse
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="path3"
cx="140.15703"
cy="176.44627"
rx="1.889045"
ry="1.925626" />
</g>
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 183.02388,19.830188 -0.20636,182.514592 53.31576,-91.81156 z"
id="path4"
sodipodi:nodetypes="cccc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 231.42072,65.022101 -21.58135,-0.0285"
id="path5" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 231.6004,49.199448 -31.3303,-0.0959"
id="path6" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 231.60025,70.361261 0.22061,-25.282103"
id="path7" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 231.69327,69.597131 5.37579,0.0701 0.0561,-23.777873 -5.31581,-0.0553"
id="path8" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 239.17334,70.580871 0.0254,-25.254853"
id="path7-5"
sodipodi:nodetypes="cc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 238.49104,69.755251 5.42669,0.0455 0.0561,-23.777873 -5.31581,-0.0553"
id="path8-9"
sodipodi:nodetypes="cccc" />
</g>
</svg>
After

Width: | Height: | Size: 4.9 KiB

View File

@@ -0,0 +1,135 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
width="297mm"
height="210mm"
viewBox="0 0 297 210"
version="1.1"
id="svg1"
inkscape:version="1.3 (0e150ed6c4, 2023-07-21)"
sodipodi:docname="rotate90.svg"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview1"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:document-units="mm"
showgrid="true"
showguides="false"
inkscape:zoom="0.94272086"
inkscape:cx="467.26451"
inkscape:cy="423.24299"
inkscape:window-width="1916"
inkscape:window-height="1040"
inkscape:window-x="0"
inkscape:window-y="38"
inkscape:window-maximized="1"
inkscape:current-layer="layer1">
<inkscape:grid
id="grid1"
units="px"
originx="0"
originy="0"
spacingx="0.26458334"
spacingy="0.26458333"
empcolor="#0000ff"
empopacity="0.25098039"
color="#0000ff"
opacity="0.1254902"
empspacing="5"
dotted="false"
gridanglex="30"
gridanglez="30"
visible="true" />
</sodipodi:namedview>
<defs
id="defs1" />
<g
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect1"
width="152.88184"
height="119.41136"
x="34.65498"
y="-182.80751"
transform="rotate(90)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2"
width="49.755535"
height="36.468258"
x="50.030079"
y="-165.87665"
transform="rotate(90)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2-5"
width="49.755535"
height="36.468258"
x="124.66872"
y="-165.87665"
transform="rotate(90)" />
<g
id="g4"
transform="rotate(90,154.17885,110.61076)"
style="stroke-width:1.5875;stroke-dasharray:none">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect3"
width="40.994682"
height="43.605846"
x="134.16664"
y="157.24184" />
<ellipse
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="path3"
cx="140.15703"
cy="176.44627"
rx="1.889045"
ry="1.925626" />
</g>
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 183.02388,202.37715 -0.20636,-182.51459 53.31576,91.81156 z"
id="path4"
sodipodi:nodetypes="cccc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 231.42072,157.18524 -21.58135,0.0285"
id="path5" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 231.6004,173.00789 -31.3303,0.0959"
id="path6" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 231.60025,151.84608 0.22061,25.2821"
id="path7" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 231.69327,152.61021 5.37579,-0.0701 0.0561,23.77787 -5.31581,0.0553"
id="path8" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 239.17334,151.62647 0.0254,25.25485"
id="path7-5"
sodipodi:nodetypes="cc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 238.49104,152.45209 5.42669,-0.0455 0.0561,23.77787 -5.31581,0.0553"
id="path8-9"
sodipodi:nodetypes="cccc" />
</g>
</svg>
After

Width: | Height: | Size: 4.8 KiB

View File

@@ -0,0 +1,135 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
width="297mm"
height="210mm"
viewBox="0 0 297 210"
version="1.1"
id="svg1"
inkscape:version="1.3 (0e150ed6c4, 2023-07-21)"
sodipodi:docname="rotate90Mirror.svg"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview1"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:document-units="mm"
showgrid="true"
showguides="false"
inkscape:zoom="0.94272086"
inkscape:cx="467.79489"
inkscape:cy="423.24299"
inkscape:window-width="1916"
inkscape:window-height="1040"
inkscape:window-x="0"
inkscape:window-y="38"
inkscape:window-maximized="1"
inkscape:current-layer="layer1">
<inkscape:grid
id="grid1"
units="px"
originx="0"
originy="0"
spacingx="0.26458334"
spacingy="0.26458333"
empcolor="#0000ff"
empopacity="0.25098039"
color="#0000ff"
opacity="0.1254902"
empspacing="5"
dotted="false"
gridanglex="30"
gridanglez="30"
visible="true" />
</sodipodi:namedview>
<defs
id="defs1" />
<g
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect1"
width="152.88184"
height="119.41136"
x="34.65498"
y="124.56432"
transform="matrix(0,1,1,0,0,0)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2"
width="49.755535"
height="36.468258"
x="50.030079"
y="141.49519"
transform="matrix(0,1,1,0,0,0)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2-5"
width="49.755535"
height="36.468258"
x="124.66872"
y="141.49519"
transform="matrix(0,1,1,0,0,0)" />
<g
id="g4"
transform="matrix(0,1,1,0,42.582224,-43.56809)"
style="stroke-width:1.5875;stroke-dasharray:none">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect3"
width="40.994682"
height="43.605846"
x="134.16664"
y="157.24184" />
<ellipse
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="path3"
cx="140.15703"
cy="176.44627"
rx="1.889045"
ry="1.925626" />
</g>
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="M 124.34795,202.37715 124.55431,19.86256 71.238554,111.67412 Z"
id="path4"
sodipodi:nodetypes="cccc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 75.951114,157.18524 21.58135,0.0285"
id="path5" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 75.771434,173.00789 31.330296,0.0959"
id="path6" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 75.771584,151.84608 -0.22061,25.2821"
id="path7" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 75.678564,152.61021 -5.37579,-0.0701 -0.0561,23.77787 5.31581,0.0553"
id="path8" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 68.198494,151.62647 -0.0254,25.25485"
id="path7-5"
sodipodi:nodetypes="cc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 68.880794,152.45209 -5.42669,-0.0455 -0.0561,23.77787 5.31581,0.0553"
id="path8-9"
sodipodi:nodetypes="cccc" />
</g>
</svg>
After

Width: | Height: | Size: 4.9 KiB

View File

@@ -0,0 +1,27 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
.. Front page matter is defined in the project README file.
.. include:: ../README.rst
:start-after: .. section-begin-libcamera
:end-before: .. section-end-libcamera
.. toctree::
:maxdepth: 1
:caption: Contents:
Home <self>
Docs <docs>
Contribute <contributing>
Getting Started <getting-started>
Developer Guide <guides/introduction>
Application Writer's Guide <guides/application-developer>
Pipeline Handler Writer's Guide <guides/pipeline-handler>
IPA Writer's guide <guides/ipa>
Tracing guide <guides/tracing>
Environment variables <environment_variables>
Sensor driver requirements <sensor_driver_requirements>
Lens driver requirements <lens_driver_requirements>
Python Bindings <python-bindings>
Camera Sensor Model <camera-sensor-model>
SoftwareISP Benchmarking <software-isp-benchmarking>
View File

@@ -0,0 +1,27 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
.. _lens-driver-requirements:
Lens Driver Requirements
========================
libcamera handles lens devices in the CameraLens class and defines
a consistent interface through its API towards other library components.
The CameraLens class uses the V4L2 subdev kernel API to interface with the
camera lens through a sub-device exposed to userspace by the lens driver.
In order for libcamera to be fully operational and provide applications and
pipeline handlers with all the information required to interface with the
camera lens, a set of mandatory features that the driver has to support has
been defined.
Mandatory Requirements
----------------------
The lens driver is assumed to be fully compliant with the V4L2 specification.
The lens driver shall support the following V4L2 controls:
* `V4L2_CID_FOCUS_ABSOLUTE`_
.. _V4L2_CID_FOCUS_ABSOLUTE: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/ext-ctrls-camera.html
View File

@@ -0,0 +1,105 @@
# SPDX-License-Identifier: CC0-1.0
doc_install_dir = get_option('datadir') / 'doc' / 'libcamera-@0@'.format(libcamera_version)
#
# Doxygen
#
doxygen = find_program('doxygen', required : get_option('documentation'))
dot = find_program('dot', required : get_option('documentation'))
if doxygen.found() and dot.found()
cdata = configuration_data()
cdata.set('VERSION', 'v@0@'.format(libcamera_git_version))
cdata.set('TOP_SRCDIR', meson.project_source_root())
cdata.set('TOP_BUILDDIR', meson.project_build_root())
cdata.set('OUTPUT_DIR', meson.current_build_dir())
cdata.set('WARN_AS_ERROR', get_option('doc_werror') ? 'YES' : 'NO')
doxygen_predefined = []
foreach key : config_h.keys()
doxygen_predefined += '@0@=@1@'.format(key, config_h.get(key))
endforeach
cdata.set('PREDEFINED', ' \\\n\t\t\t '.join(doxygen_predefined))
doxyfile = configure_file(input : 'Doxyfile.in',
output : 'Doxyfile',
configuration : cdata)
doxygen_input = [
doxyfile,
libcamera_base_headers,
libcamera_base_sources,
libcamera_internal_headers,
libcamera_ipa_headers,
libcamera_ipa_interfaces,
libcamera_public_headers,
libcamera_sources,
libipa_headers,
libipa_sources,
]
if is_variable('ipu3_ipa_sources')
doxygen_input += [ipu3_ipa_sources]
endif
custom_target('doxygen',
input : doxygen_input,
output : 'api-html',
command : [doxygen, doxyfile],
install : true,
install_dir : doc_install_dir,
install_tag : 'doc')
endif
#
# Sphinx
#
sphinx = find_program('sphinx-build-3', required : false)
if not sphinx.found()
sphinx = find_program('sphinx-build', required : get_option('documentation'))
endif
if sphinx.found()
docs_sources = [
'camera-sensor-model.rst',
'code-of-conduct.rst',
'coding-style.rst',
'conf.py',
'contributing.rst',
'docs.rst',
'environment_variables.rst',
'guides/application-developer.rst',
'guides/introduction.rst',
'guides/ipa.rst',
'guides/pipeline-handler.rst',
'guides/tracing.rst',
'index.rst',
'lens_driver_requirements.rst',
'python-bindings.rst',
'sensor_driver_requirements.rst',
'software-isp-benchmarking.rst',
'../README.rst',
]
release = 'release=v' + libcamera_git_version
custom_target('documentation',
command : [sphinx, '-D', release, '-q', '-W', '-b', 'html',
meson.current_source_dir(), '@OUTPUT@'],
input : docs_sources,
output : 'html',
build_by_default : true,
install : true,
install_dir : doc_install_dir,
install_tag : 'doc')
custom_target('documentation-linkcheck',
command : [sphinx, '-W', '-b', 'linkcheck', meson.current_source_dir(), '@OUTPUT@'],
build_always_stale : true,
input : docs_sources,
output : 'linkcheck')
endif
View File

@@ -0,0 +1,70 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
.. _python-bindings:
Python Bindings for libcamera
=============================
.. warning::
The bindings are a work in progress, and the API will change.
Differences to the C++ API
--------------------------
As a rule of thumb the bindings try to follow the C++ API when possible. This
chapter lists the differences.
Mostly these differences fall under two categories:
1. Differences caused by the inherent differences between C++ and Python.
These differences are usually caused by the use of threads or differences in
C++ vs Python memory management.
2. Differences caused by the code being work-in-progress. It's not always
trivial to create a binding in a satisfying way, and the current bindings
contain simplified versions of the C++ API just to move forward. These
differences are expected to eventually go away.
Coding Style
------------
The C++ code for the bindings follows the libcamera coding style as much as
possible. Note that the indentation does not quite follow the clang-format
style, as clang-format makes a mess of the style used.
The API visible to the Python side follows the Python style as much as possible.
This means that e.g. ``Camera::generateConfiguration`` maps to
``Camera.generate_configuration``.
CameraManager
-------------
The Python API provides a singleton CameraManager via ``CameraManager.singleton()``.
There is no need to start or stop the CameraManager.
Handling Completed Requests
---------------------------
The Python bindings do not expose the ``Camera::requestCompleted`` signal
directly as the signal is invoked from another thread and it has real-time
constraints. Instead the bindings queue the completed requests internally and
use an eventfd to inform the user that there are completed requests.
The user can wait on the eventfd, and upon getting an event, use
``CameraManager.get_ready_requests()`` to clear the eventfd event and to get
the completed requests.
Controls & Properties
---------------------
The classes related to controls and properties are rather complex to implement
directly in the Python bindings. There are some simplifications in the Python
bindings:
- There is no ControlValue class. Python objects are automatically converted
to ControlValues and vice versa.
- There is no ControlList class. A Python dict with ControlId keys and Python
object values is used instead.
- There is no ControlInfoMap class. A Python dict with ControlId keys and
ControlInfo values is used instead.
View File

@@ -0,0 +1,93 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
.. _sensor-driver-requirements:
Sensor Driver Requirements
==========================
libcamera handles imaging devices in the CameraSensor class and defines
a consistent interface through its API towards other library components.
The CameraSensor class uses the V4L2 subdev kernel API to interface with the
camera sensor through one or multiple sub-devices exposed in userspace by
the sensor driver.
In order for libcamera to be fully operational and provide applications and
pipeline handlers with all the information required to interface with the
camera sensor, a set of mandatory and optional features that the driver has
to support has been defined.
Mandatory Requirements
----------------------
The sensor driver is assumed to be fully compliant with the V4L2 specification.
For RAW sensors, the sensor driver shall support the following V4L2 controls:
* `V4L2_CID_ANALOGUE_GAIN`_
* `V4L2_CID_EXPOSURE`_
* `V4L2_CID_HBLANK`_
* `V4L2_CID_PIXEL_RATE`_
* `V4L2_CID_VBLANK`_
.. _V4L2_CID_ANALOGUE_GAIN: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/ext-ctrls-image-source.html
.. _V4L2_CID_EXPOSURE: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/control.html
.. _V4L2_CID_HBLANK: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/ext-ctrls-image-source.html
.. _V4L2_CID_PIXEL_RATE: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/ext-ctrls-image-process.html
.. _V4L2_CID_VBLANK: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/ext-ctrls-image-source.html
The ``ANALOGUE_GAIN`` control units are sensor-specific. libcamera requires
a sensor-specific CameraSensorHelper implementation to translate between the
sensor-specific ``gain code`` and the analogue ``gain value`` expressed as an
absolute number as defined by ``controls::AnalogueGain``.
While V4L2 doesn't specify a unit for the ``EXPOSURE`` control, libcamera
requires it to be expressed as a number of image lines. Camera sensor drivers
that do not comply with this requirement will need to be adapted or will produce
incorrect results.
The ``HBLANK``, ``PIXEL_RATE`` and ``VBLANK`` controls are used to compute the
sensor output timings.
Optional Requirements
---------------------
The sensor driver should support the following V4L2 controls:
* `V4L2_CID_CAMERA_ORIENTATION`_
* `V4L2_CID_CAMERA_SENSOR_ROTATION`_
.. _V4L2_CID_CAMERA_ORIENTATION: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/ext-ctrls-camera.html
.. _V4L2_CID_CAMERA_SENSOR_ROTATION: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/ext-ctrls-camera.html
The controls are used to register the camera location and rotation.
In order to support rotating the image the sensor driver should support
* `V4L2_CID_HFLIP`_
* `V4L2_CID_VFLIP`_
.. _V4L2_CID_HFLIP: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/control.html
.. _V4L2_CID_VFLIP: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/control.html
The controls must be writable from userspace. In the case of RAW Bayer
sensors, drivers should correctly report if vertical/horizontal flips modify
the Bayer pattern ordering by reporting the `V4L2_CTRL_FLAG_MODIFY_LAYOUT`
control flag.
The sensor driver should implement support for the V4L2 Selection API;
specifically, it should support the `VIDIOC_SUBDEV_G_SELECTION`_ ioctl with
the following selection targets:
.. _VIDIOC_SUBDEV_G_SELECTION: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/vidioc-subdev-g-selection.html#c.V4L.VIDIOC_SUBDEV_G_SELECTION
* `V4L2_SEL_TGT_CROP_BOUNDS`_ to report the readable pixel array area size
* `V4L2_SEL_TGT_CROP_DEFAULT`_ to report the active pixel array area size
* `V4L2_SEL_TGT_CROP`_ to report the analogue selection rectangle
Support for the selection API is scheduled to become a mandatory feature in
the near future.
.. _V4L2_SEL_TGT_CROP_BOUNDS: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/v4l2-selection-targets.html
.. _V4L2_SEL_TGT_CROP_DEFAULT: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/v4l2-selection-targets.html
.. _V4L2_SEL_TGT_CROP: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/v4l2-selection-targets.html

(Image file added, 171 KiB; diff suppressed)

(Image file added, 80 KiB; diff suppressed)

View File

@@ -0,0 +1,77 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0

.. _software-isp-benchmarking:

Software ISP benchmarking
=========================

The Software ISP is particularly sensitive to performance regressions,
therefore it is a good idea to always benchmark the Software ISP before and
after making changes to it, and ensure that there are no performance
regressions.

DebayerCpu class builtin benchmark
----------------------------------

The DebayerCpu class has a builtin benchmark. This benchmark measures the time
spent on processing (collecting statistics and debayering) only; it does not
measure the time spent on capturing or outputting the frames.

The builtin benchmark always runs, so it can be used by simply running ``cam``
or ``qcam`` with a pipeline using the Software ISP.

When it runs it will skip measuring the first 30 frames to allow the caches
and the CPU temperature (turbo-ing) to warm up, and then it measures 30 frames
and shows the total and per frame processing time using an info level log
message:

.. code-block:: text

   INFO Debayer debayer_cpu.cpp:907 Processed 30 frames in 244317us, 8143 us/frame

To get stable measurements it is advised to stop any other processes which may
cause significant CPU usage (e.g. disable wifi, bluetooth and browsers). When
possible it is also advisable to disable CPU turbo-ing and frequency-scaling.

For example when benchmarking on a Lenovo ThinkPad X1 Yoga Gen 8, with the
charger plugged in, the CPU can be fixed to run at 2 GHz using:

.. code-block:: shell

   sudo x86_energy_perf_policy --turbo-enable 0
   sudo cpupower frequency-set -d 2GHz -u 2GHz

With these settings the builtin benchmark reports a processing time of
~7.8 ms/frame on this laptop for FHD SGRBG10 (unpacked) Bayer data.

Measuring power consumption
---------------------------

Since the Software ISP is often used on mobile devices it is also important to
measure power consumption and ensure that it does not regress.

For example to measure power consumption on a Lenovo ThinkPad X1 Yoga Gen 8 it
needs to be running on battery, and it should be configured with its
platform-profile (/sys/firmware/acpi/platform_profile) set to balanced and with
its default turbo and frequency-scaling behavior, to match real world usage.

Then start qcam to capture a FHD picture at 30 fps and position the qcam window
so that it is fully visible. After this run the following command to monitor the
power consumption:

.. code-block:: shell

   watch -n 10 cat /sys/class/power_supply/BAT0/power_now /sys/class/hwmon/hwmon6/fan?_input

Note that this not only measures the power consumption in µW, it also monitors
the speed of this laptop's 2 fans. This is important because, depending on the
ambient temperature, the 2 fans may spin up while testing, and this will cause
an additional power consumption of approx. 0.5 W, skewing the measurement.

After starting qcam and the watch command, let the laptop sit without using it
for 2 minutes so that the readings stabilize. Then check that the fans have not
turned on, manually take a couple of consecutive power readings, and average
these.

On the example Lenovo ThinkPad X1 Yoga Gen 8 laptop this results in a measured
power consumption of approx. 13 W while running qcam versus approx. 4-5 W while
sitting idle with its OLED panel on.
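The manual averaging step can also be scripted; a small sketch, assuming the
same ``BAT0`` sysfs path as above:

.. code-block:: python

   import time

   SAMPLES = 5
   INTERVAL = 10  # seconds, matching the watch interval above

   readings = []
   for _ in range(SAMPLES):
       # power_now reports the instantaneous battery drain in microwatts.
       with open('/sys/class/power_supply/BAT0/power_now') as f:
           readings.append(int(f.read()))
       time.sleep(INTERVAL)

   print(f'average power: {sum(readings) / len(readings) / 1e6:.2f} W')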

View File

@@ -0,0 +1,14 @@
{#
SPDX-License-Identifier: CC-BY-SA-4.0
#}
<footer>
<div id="signature">
{%- if show_copyright %}
{%- if hasdoc('copyright') %}
{% trans path=pathto('copyright'), copyright=copyright|e %}&copy; <a href="{{ path }}">Copyright</a> {{ copyright }}.{% endtrans %}
{%- else %}
{% trans copyright=copyright|e %}&copy; Copyright {{ copyright }}.{% endtrans %}
{%- endif %}
{%- endif %}
</div>
</footer>

View File

@@ -0,0 +1,109 @@
{#
SPDX-License-Identifier: CC-BY-SA-4.0
#}
{# TEMPLATE VAR SETTINGS #}
{%- set url_root = pathto('', 1) %}
{%- if url_root == '#' %}{% set url_root = '' %}{% endif %}
{%- if not embedded and docstitle %}
{%- set titlesuffix = " &mdash; "|safe + docstitle|e %}
{%- else %}
{%- set titlesuffix = "" %}
{%- endif %}
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
{{ metatags }}
<meta name="viewport" content="width=device-width, initial-scale=1.0">
{% block htmltitle %}
<title>{{ title|striptags|e }}{{ titlesuffix }}</title>
{% endblock %}
{# FAVICON #}
{% if favicon %}
<link rel="shortcut icon" href="{{ pathto('_static/' + favicon, 1) }}"/>
{% endif %}
{# CSS #}
{# OPENSEARCH #}
{% if not embedded %}
{% if use_opensearch %}
<link rel="search" type="application/opensearchdescription+xml" title="{% trans docstitle=docstitle|e %}Search within {{ docstitle }}{% endtrans %}" href="{{ pathto('_static/opensearch.xml', 1) }}"/>
{% endif %}
{% endif %}
{% for cssfile in css_files %}
<link rel="stylesheet" href="{{ pathto(cssfile, 1) }}" type="text/css" />
{% endfor %}
{% for cssfile in extra_css_files %}
<link rel="stylesheet" href="{{ pathto(cssfile, 1) }}" type="text/css" />
{% endfor %}
{%- block linktags %}
{%- if hasdoc('about') %}
<link rel="author" title="{{ _('About these documents') }}"
href="{{ pathto('about') }}"/>
{%- endif %}
{%- if hasdoc('genindex') %}
<link rel="index" title="{{ _('Index') }}"
href="{{ pathto('genindex') }}"/>
{%- endif %}
{%- if hasdoc('search') %}
<link rel="search" title="{{ _('Search') }}" href="{{ pathto('search') }}"/>
{%- endif %}
{%- if hasdoc('copyright') %}
<link rel="copyright" title="{{ _('Copyright') }}" href="{{ pathto('copyright') }}"/>
{%- endif %}
<link rel="top" title="{{ docstitle|e }}" href="{{ pathto('index') }}"/>
{%- if parents %}
<link rel="up" title="{{ parents[-1].title|striptags|e }}" href="{{ parents[-1].link|e }}"/>
{%- endif %}
{%- if next %}
<link rel="next" title="{{ next.title|striptags|e }}" href="{{ next.link|e }}"/>
{%- endif %}
{%- if prev %}
<link rel="prev" title="{{ prev.title|striptags|e }}" href="{{ prev.link|e }}"/>
{%- endif %}
{%- endblock %}
{%- block extrahead %} {% endblock %}
</head>
<body role="document">
<header>
<div id="navbar">
<div class="navbar-brand">
<div class="navbar-logo"> _
+-/ \-+
| (o) |
+-----+</div>
<div class="navbar-name"><span class="text-light">lib</span>camera</div>
</div>
<div class="navbar">
{{ toctree(maxdepth=1) }}
<div class="searchbox" role="search">
<form class="search" action="{{ pathto('search') }}" method="get">
<input type="text" name="q" />
<input type="submit" value="Go" />
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</form>
</div>
</div>
</div>
</header>
<div id="content">
{# PAGE CONTENT #}
<div class="block">
{% block body %}{% endblock %}
</div>
</div>
{% include "footer.html" %}
</body>
</html>

View File

@@ -0,0 +1,63 @@
{#
SPDX-License-Identifier: CC-BY-SA-4.0
#}
{#
basic/search.html
~~~~~~~~~~~~~~~~~
Template for the search page.
:copyright: Copyright 2007-2018 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
#}
{%- extends "layout.html" %}
{% block extrahead %}
<script type="text/javascript" id="documentation_options" data-url_root="{{ pathto('', 1) }}" src="{{ pathto('_static/documentation_options.js', 1) }}"></script>
{%- for scriptfile in script_files %}
<script type="text/javascript" src="{{ pathto(scriptfile, 1) }}"></script>
{%- endfor %}
<script type="text/javascript" src="_static/searchtools.js"></script>
<script type="text/javascript">
jQuery(function() { Search.loadIndex("{{ pathto('searchindex.js', 1) }}"); });
</script>
{# this is used when loading the search index using $.ajax fails,
such as on Chrome for documents on localhost #}
<script type="text/javascript" id="searchindexloader"></script>
{% endblock %}
{% block body %}
<h1 id="search-documentation">{{ _('Search') }}</h1>
<div id="fallback" class="admonition warning">
<script type="text/javascript">$('#fallback').hide();</script>
<p>
Please activate JavaScript to enable the search functionality.
</p>
</div>
<p>
From here you can search these documents. Enter your search
words into the box below and click "search". Note that the search
function will automatically search for all of the words. Pages
containing fewer words won't appear in the result list.
</p>
<form action="" method="get">
<input type="text" name="q" value="" />
<input type="submit" value="{{ _('search') }}" />
<span id="search-progress" style="padding-left: 10px"></span>
</form>
{% if search_performed %}
<h2>{{ _('Search Results') }}</h2>
{% if not search_results %}
<p>{{ _('Your search did not match any documents. Please make sure that all words are spelled correctly and that you\'ve selected enough categories.') }}</p>
{% endif %}
{% endif %}
<div id="search-results">
{% if search_results %}
<ul>
{% for href, caption, context in search_results %}
<li><a href="{{ pathto(href) }}">{{ caption }}</a>
<div class="context">{{ context|e }}</div>
</li>
{% endfor %}
</ul>
{% endif %}
</div>
{% endblock %}

View File

@@ -0,0 +1,291 @@
/* SPDX-License-Identifier: CC-BY-SA-4.0 */
html {
background-image: linear-gradient(to bottom right, #4895e1, #56c3ae);
background-size: cover;
background-repeat: no-repeat;
min-height: 100vh;
}
body {
color: rgb(0, 0, 0, 0.65);
font-family: Arial, sans-serif;
margin: 0px;
}
a {
color: unset;
font-weight: bold;
text-decoration: underline dotted;
}
a.headerlink {
color: rgba(0, 0, 0, 0.2);
font-size: 70%;
padding-left: 5px;
visibility: hidden;
}
a.toc-backref {
text-decoration: none;
}
h1:hover a.headerlink,
h2:hover a.headerlink,
h3:hover a.headerlink,
h4:hover a.headerlink,
h5:hover a.headerlink,
h6:hover a.headerlink {
visibility: visible;
}
dt {
font-weight: bold;
}
.text-light {
color: rgba(255, 255, 255, 0.3);
}
div#navbar {
margin-top: 0px;
}
div.navbar-brand {
color: rgb(255, 255, 255, 1.0);
float: left;
font-size: 36px;
margin: 0px 24px 24px 24px;
}
div.navbar-logo {
float: left;
font-family: monospace;
font-size: 18px;
font-weight: bold;
white-space: pre;
}
div.navbar-name {
float: left;
color: rgb(255, 255, 255, 1.0);
font-size: 34px;
margin-top: 31px;
margin-left: 10px;
padding-top: 1px;
}
div.navbar {
float: right;
}
div.navbar p.caption {
height: 0px;
margin: 0px;
visibility: hidden;
}
div.navbar ul {
float: left;
font-size: 24px;
list-style: none;
margin-top: 42px;
margin-right: 20px;
padding-left: 0px;
}
div.navbar a {
font-weight: normal;
text-decoration: none;
}
div.navbar li {
float: left;
margin-left: 20px;
margin-right: 20px;
position: relative;
}
div.navbar li a {
color: rgb(255, 255, 255, 0.5);
position: relative;
}
div.navbar li a:before {
content: "";
position: absolute;
width: 100%;
height: 2px;
bottom: 0;
left: 0;
background-color: rgb(255, 255, 255, 0.5);
visibility: hidden;
transform: scaleX(0);
transition: all 0.3s ease-in-out 0s;
}
div.navbar li a:hover {
color: rgb(255, 255, 255, 1.0);
}
div.navbar li a:hover:before {
visibility: visible;
transform: scaleX(1);
}
div.navbar li.current a {
color: rgb(255, 255, 255, 1.0);
}
div.navbar li.current a:before {
visibility: visible;
transform: unset;
transition: unset;
}
div.navbar div.searchbox {
background-color: white;
float: right;
margin-right: 50px;
margin-top: 42px;
}
div.navbar input[type=text] {
border-width: 0;
height: 2em;
margin-left: 10px;
margin-right: 5px;
}
div.navbar input[type=submit] {
background-color: white;
background-image: url(../search.png);
background-repeat: no-repeat;
border-width: 0;
color: rgba(0, 0, 0, 0);
margin-right: 2px;
width: 20px;
}
div#frontpage {
clear: both;
padding-top: 50px;
margin-left: auto;
margin-right: auto;
width: 75%;
display: flex;
justify-content: space-between;
}
div#frontpage > div.block {
background-color: white;
border-radius: 5px;
box-shadow: 0 4px 16px 0 rgba(0, 0, 0, 0.2), 0 6px 40px 0 rgba(0, 0, 0, 0.19);
color: rgb(0, 0, 0, 0.5);
font-size: 20px;
margin-bottom: 40px;
margin-right: 20px;
margin-left: 20px;
padding: 20px 60px 20px 60px;
text-align: center;
width: 50%;
}
div#frontpage > div.block h1 {
font-size: 64px;
padding-left: 20%;
padding-right: 20%;
text-align: center;
text-shadow: 4px 4px 5px;
}
div#content {
background-color: white;
clear: both;
padding-top: 50px;
padding-bottom: 50px;
margin-left: 0px;
margin-right: 0px;
}
div#content > div.block {
font-size: 16px;
margin-right: 0px;
margin-left: 0px;
max-width: 1280px;
padding: 0px 60px 0px 60px;
text-align: justify;
}
div#content > div.block h1 {
font-size: 40px;
margin-top: 0px;
text-align: left;
}
div#content > div.block > div.section {
max-width: 800px;
}
div.local.topic {
float: right;
background-color: #fcfcff;
border: 1px dotted #4896e0;
margin-left: 20px;
margin-right: 0px;
max-width: 15em;
padding: 10px 20px 10px 10px;
text-align: left;
}
div.local.topic ul {
padding-left: 20px;
margin-bottom: 5px;
}
div.local.topic > ul:before {
content: "Contents";
display: block;
font-weight: bold;
margin-bottom: 10px;
}
div.local.topic a {
font-weight: normal;
padding-left: 10px;
text-decoration: none;
}
div.highlight-shell > div.highlight > pre,
pre.console {
background-color: #fcfcff;
border: 1px dotted #4896e0;
margin-left: 0em;
padding: 10px;
text-align: left;
}
div.highlight-default > div.highlight > pre,
pre.diagram {
background-color: #fcfcff;
border: 1px dotted #4896e0;
font-size: 12px;
margin-left: 0em;
padding: 10px;
text-align: left;
width: 47em;
}
div#signature {
color: rgb(255, 255, 255, 0.5);
margin: 20px;
float: right;
font-size: 12px;
}
#libcamera div.toctree-wrapper {
height: 0px;
margin: 0px;
padding: 0px;
visibility: hidden;
}

(Binary image file added, 482 B; not shown)

View File

@@ -0,0 +1,7 @@
# SPDX-License-Identifier: CC-BY-SA-4.0
[theme]
inherit = basic
stylesheet = css/theme.css
[options]