An attempt at getting image data back

This commit is contained in:
Sebastiaan de Schaetzen 2024-07-14 00:27:33 +02:00
parent e026bc93f7
commit 6452d2e774
1314 changed files with 218350 additions and 38 deletions

View File

@ -0,0 +1,43 @@
-- Y pixel coordinate for a given text row index.
-- Row 0 starts 15px from the top; each further row is 25px lower.
local function getTextY(line)
	local top_margin, row_height = 15, 25
	return top_margin + row_height * line
end
-- Render the telemetry overlay: camera feed (if any), age of the last
-- MQTT message, both battery voltages (red when low or unknown), and
-- the measured ping latency. Reads BotState and Ping; draws via love.graphics.
function love.draw2()
	local g = love.graphics

	-- Clear to black and reset the draw colour to white
	g.setBackgroundColor(0, 0, 0)
	g.setColor(1, 1, 1)

	-- Show the most recent camera frame when one has been received
	if BotState.camfeed then
		g.draw(BotState.camfeed)
	end

	-- Age of the most recent telemetry message
	local time = "Never"
	if BotState.lastMessage ~= 0 then
		time = math.floor(love.timer.getTime() - BotState.lastMessage) .. "s ago"
	end
	g.print("Last message received: " .. time, 5, 5)

	-- CPU battery: red when the corrected voltage is unknown or <= 3 V
	if BotState.cpuBatteryCorrected == nil or BotState.cpuBatteryCorrected <= 3 then
		g.setColor(1, 0, 0)
	else
		g.setColor(1, 1, 1)
	end
	g.print("CPU Batt: " .. formatSafe("%.02f (%.02f) V", BotState.cpuBattery, BotState.cpuBatteryCorrected), 5, getTextY(1))

	-- Servo battery: same low-voltage colouring rule
	if BotState.servoBatteryCorrected == nil or BotState.servoBatteryCorrected <= 3 then
		g.setColor(1, 0, 0)
	else
		g.setColor(1, 1, 1)
	end
	g.print("Servo Batt: " .. formatSafe("%.02f (%.02f) V", BotState.servoBattery, BotState.servoBatteryCorrected), 5, getTextY(2))

	-- Round-trip latency as computed by the ping/pong handler
	g.setColor(1, 1, 1)
	g.print("Latency: " .. Ping.latency, 5, getTextY(3))
end

View File

@ -1,45 +1,70 @@
local lastMessage = 0
package.loaded["draw"] = nil
local botState = {
require("draw")
BotState = {
lastMessage = 0,
cpuBattery = nil,
cpuBatteryCorrected = nil,
servoBattery = nil,
servoBatteryCorrected = nil,
camfeed = nil,
}
function love.draw2()
love.graphics.setBackgroundColor(0, 0, 0)
Ping = {
timeSent = 0,
latency = "unknown",
payload = nil,
}
local time
if lastMessage == 0 then
time = "Never"
else
time = math.floor(love.timer.getTime() - lastMessage) .. "s ago"
function love.update2()
local now = love.timer.getTime()
if now - Ping.timeSent > 5 then
Ping.payload = ""
for i = 0, 10 do
Ping.payload = Ping.payload .. string.char(love.math.random(65, 91))
end
Ping.timeSent = now
love.mqtt.send("command/ping", Ping.payload)
print("Sending ping")
end
love.graphics.print("Last message received: " .. time, 5, 5)
love.graphics.print("CPU Batt: " .. formatSafe("%.02f V", botState.cpuBattery), 5, 30)
love.graphics.print("Servo Batt: " .. formatSafe("%.02f V", botState.servoBattery), 5, 45)
end
function formatSafe(format, value)
function formatSafe(format, value, ...)
if value == nil then
return "unknown"
end
return string.format(format, value)
return string.format(format, value, ...)
end
function love.load()
love.graphics.setFont(love.graphics.newFont(15))
love.graphics.setFont(love.graphics.newFont(20))
love.window.setFullscreen(true)
love.mqtt.subscribe("telemetry/#")
end
function love.mqtt.message(topic, payload)
local oldTime = BotState.lastMessage
BotState.lastMessage = love.timer.getTime()
if topic == "telemetry/cpu_battery" then
botState.cpuBattery = tonumber(payload)
lastMessage = love.timer.getTime()
BotState.cpuBattery = tonumber(payload)
BotState.cpuBatteryCorrected = BotState.cpuBattery / 2
elseif topic == "telemetry/servo_battery" then
botState.servoBattery = tonumber(payload)
lastMessage = love.timer.getTime()
BotState.servoBattery = tonumber(payload)
BotState.servoBatteryCorrected = BotState.servoBattery / 2
elseif topic == "telemetry/camfeed" then
print("Got camfeed")
fileData = love.filesystem.newFileData(payload, "camfeed")
BotState.camfeed = love.graphics.newImage(fileData)
elseif topic == "telemetry/pong" then
if payload == Ping.payload then
local timeReceived = love.timer.getTime()
Ping.latency = math.floor((timeReceived - Ping.timeSent) * 1000) .. "ms"
end
else
print("Got unknown telemetry at " .. topic)
BotState.lastMessage = oldTime
end
end

View File

@ -38,7 +38,8 @@ local function onCommand(command)
assert(client:subscribe {
topic = topic
})
print("Subribed to " .. topic)
print("Subscribed to " .. topic)
print("Subscribed to " .. topic)
end
end

View File

@ -6,6 +6,34 @@ services:
environment:
SPIDER_HOSTNAME: spider
spider-cam:
build: spider-cam
restart: unless-stopped
privileged: true
devices:
- /dev/v4l-subdev0
- /dev/dma_heap
- /dev/video0
- /dev/video10
- /dev/video11
- /dev/video12
- /dev/video13
- /dev/video14
- /dev/video15
- /dev/video16
- /dev/video18
- /dev/video19
- /dev/video20
- /dev/video21
- /dev/video22
- /dev/video23
- /dev/video31
- /dev/media0
- /dev/media1
- /dev/media2
- /dev/media3
- /dev/media4
spider-host:
build: spider-host
restart: unless-stopped

16
spider-cam/Dockerfile Normal file
View File

@ -0,0 +1,16 @@
FROM alpine:3.20.1
RUN apk add --no-cache git meson alpine-sdk cmake linux-headers python3 python3-dev \
py3-yaml py3-jinja2 py3-ply py3-pybind11 py3-pybind11-dev py3-paho-mqtt
#RUN apk add --no-cache libcamera libcamera-tools libcamera-v4l2 python3 python3-dev \
# cython py3-setuptools alpine-sdk ffmpeg ffmpeg-dev
WORKDIR /libcamera
ADD libcamera /libcamera
RUN meson setup --prefix /usr build && ninja -C build install
WORKDIR /app
COPY mfb.py /app
COPY spider-cam.py /app
CMD ["python3", "spider-cam.py"]

View File

@ -0,0 +1,168 @@
# SPDX-License-Identifier: GPL-2.0-only
#
# clang-format configuration file. Intended for clang-format >= 12.
#
# For more information, see:
#
# Documentation/process/clang-format.rst
# https://clang.llvm.org/docs/ClangFormat.html
# https://clang.llvm.org/docs/ClangFormatStyleOptions.html
#
---
Language: Cpp
AccessModifierOffset: -8
AlignAfterOpenBracket: Align
AlignConsecutiveAssignments: false
AlignConsecutiveDeclarations: false
AlignEscapedNewlines: Right
AlignOperands: true
AlignTrailingComments: false
AllowAllParametersOfDeclarationOnNextLine: false
AllowShortBlocksOnASingleLine: false
AllowShortCaseLabelsOnASingleLine: false
AllowShortFunctionsOnASingleLine: InlineOnly
AllowShortIfStatementsOnASingleLine: false
AllowShortLoopsOnASingleLine: false
AlwaysBreakAfterDefinitionReturnType: None
AlwaysBreakAfterReturnType: None
AlwaysBreakBeforeMultilineStrings: false
AlwaysBreakTemplateDeclarations: MultiLine
BinPackArguments: true
BinPackParameters: true
BraceWrapping:
AfterClass: true
AfterControlStatement: false
AfterEnum: false
AfterFunction: true
AfterNamespace: false
AfterObjCDeclaration: false
AfterStruct: false
AfterUnion: false
AfterExternBlock: false
BeforeCatch: false
BeforeElse: false
IndentBraces: false
SplitEmptyFunction: true
SplitEmptyRecord: true
SplitEmptyNamespace: true
BreakBeforeBinaryOperators: None
BreakBeforeBraces: Custom
BreakBeforeInheritanceComma: false
BreakInheritanceList: BeforeColon
BreakBeforeTernaryOperators: true
BreakConstructorInitializers: BeforeColon
BreakAfterJavaFieldAnnotations: false
BreakStringLiterals: false
ColumnLimit: 0
CommentPragmas: '^ IWYU pragma:'
CompactNamespaces: false
ConstructorInitializerAllOnOneLineOrOnePerLine: false
ConstructorInitializerIndentWidth: 8
ContinuationIndentWidth: 8
Cpp11BracedListStyle: false
DerivePointerAlignment: false
DisableFormat: false
ExperimentalAutoDetectBinPacking: false
FixNamespaceComments: true
ForEachMacros:
- 'udev_list_entry_foreach'
IncludeBlocks: Regroup
IncludeCategories:
# Headers matching the name of the component are matched automatically.
# Priority 1
# Other library headers (explicit overrides to match before system headers)
- Regex: '(<jpeglib.h>|<libudev.h>|<tiffio.h>|<xf86drm.h>|<xf86drmMode.h>|<yaml.h>)'
Priority: 9
# Qt includes (match before C++ standard library)
- Regex: '<Q([A-Za-z0-9\-_])+>'
CaseSensitive: true
Priority: 9
# Headers in <> with an extension. (+system libraries)
- Regex: '<([A-Za-z0-9\-_])+\.h>'
Priority: 2
# System headers
- Regex: '<sys/.*>'
Priority: 2
# C++ standard library includes (no extension)
- Regex: '<([A-Za-z0-9\-_/])+>'
Priority: 2
# Linux headers, as a second group/subset of system headers
- Regex: '<linux/.*>'
Priority: 3
# Headers for libcamera Base support
- Regex: '<libcamera/base/private.h>'
Priority: 4
- Regex: '<libcamera/base/.*\.h>'
Priority: 5
# Public API Headers for libcamera, which are not in a subdir (i.e. ipa/,internal/)
- Regex: '<libcamera/([A-Za-z0-9\-_])+.h>'
Priority: 6
# IPA Interfaces
- Regex: '<libcamera/ipa/.*\.h>'
Priority: 7
# libcamera Internal headers in ""
- Regex: '"libcamera/internal/.*\.h"'
Priority: 8
# Other libraries headers with one group per library (.h or .hpp)
- Regex: '<.*/.*\.hp*>'
Priority: 9
# local modular includes "path/file.h" (.h or .hpp)
- Regex: '"(.*/)+.*\.hp*"'
Priority: 10
# Other local headers "file.h" with extension (.h or .hpp)
- Regex: '".*.hp*"'
Priority: 11
# Any unmatched line, separated from the last group
- Regex: '"*"'
Priority: 100
IncludeIsMainRegex: '(_test)?$'
IndentCaseLabels: false
IndentPPDirectives: None
IndentWidth: 8
IndentWrappedFunctionNames: false
JavaScriptQuotes: Leave
JavaScriptWrapImports: true
KeepEmptyLinesAtTheStartOfBlocks: false
MacroBlockBegin: ''
MacroBlockEnd: ''
MaxEmptyLinesToKeep: 1
NamespaceIndentation: None
ObjCBinPackProtocolList: Auto
ObjCBlockIndentWidth: 8
ObjCSpaceAfterProperty: true
ObjCSpaceBeforeProtocolList: true
# Taken from git's rules
PenaltyBreakAssignment: 10
PenaltyBreakBeforeFirstCallParameter: 30
PenaltyBreakComment: 10
PenaltyBreakFirstLessLess: 0
PenaltyBreakString: 10
PenaltyBreakTemplateDeclaration: 10
PenaltyExcessCharacter: 100
PenaltyReturnTypeOnItsOwnLine: 60
PointerAlignment: Right
ReflowComments: false
SortIncludes: true
SortUsingDeclarations: true
SpaceAfterCStyleCast: false
SpaceAfterTemplateKeyword: false
SpaceBeforeAssignmentOperators: true
SpaceBeforeCpp11BracedList: false
SpaceBeforeCtorInitializerColon: true
SpaceBeforeInheritanceColon: true
SpaceBeforeParens: ControlStatements
SpaceBeforeRangeBasedForLoopColon: true
SpaceInEmptyParentheses: false
SpacesBeforeTrailingComments: 1
SpacesInAngles: false
SpacesInContainerLiterals: false
SpacesInCStyleCastParentheses: false
SpacesInParentheses: false
SpacesInSquareBrackets: false
Standard: Cpp11
TabWidth: 8
UseTab: Always
...

View File

@ -0,0 +1,4 @@
# SPDX-License-Identifier: CC0-1.0
Checks: -clang-diagnostic-c99-designator
FormatStyle: file

8
spider-cam/libcamera/.gitignore vendored Normal file
View File

@ -0,0 +1,8 @@
# SPDX-License-Identifier: CC0-1.0
/build/
/patches/
*.patch
*.pyc
__pycache__/

View File

@ -0,0 +1,31 @@
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Upstream-Name: libcamera
Upstream-Contact: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
Source: https://git.libcamera.org/libcamera/libcamera.git/
Files: Documentation/binning.svg
Documentation/camera-sensor-model.rst
Documentation/sensor_model.svg
Copyright: Copyright 2023 Ideas On Board Oy
License: CC-BY-SA-4.0
Files: Documentation/theme/static/search.png
Copyright: 2022 Fonticons, Inc.
License: CC-BY-4.0
Files: src/ipa/rpi/vc4/data/*.json
utils/raspberrypi/ctt/ctt_config_example.json
utils/raspberrypi/ctt/ctt_ref.pgm
Copyright: 2019-2020 Raspberry Pi Ltd
License: BSD-2-Clause
Files: src/qcam/assets/feathericons/*.svg
Copyright: 2019 Cole Bemis (and other Feather icons contributors)
License: MIT
Comment: https://feathericons.com/
Files: utils/ipc/mojo
utils/ipc/tools
Copyright: Copyright 2013-2020 The Chromium Authors. All rights reserved.
License: BSD-3-Clause
Source: https://chromium.googlesource.com/chromium/src.git/

View File

@ -0,0 +1,71 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
==========
Licenses
==========
TL;DR summary: The libcamera core is covered by the LGPL-2.1-or-later license.
IPA modules included in libcamera are covered by a free software license.
Third-parties may develop IPA modules outside of libcamera and distribute them
under a closed-source license, provided they do not include source code from
the libcamera project.
The libcamera project contains multiple libraries, applications and utilities.
Licenses are expressed through SPDX tags in text-based files that support
comments, and through the .reuse/dep5 file otherwise. A copy of all licenses is
stored in the LICENSES directory.
The following text summarizes the licenses covering the different components of
the project to offer a quick overview for developers. The SPDX and DEP5
information are however authoritative and shall prevail in case of
inconsistencies with the text below.
The libcamera core source code, located under the include/libcamera/ and
src/libcamera/ directories, is fully covered by the LGPL-2.1-or-later license,
which thus covers distribution of the libcamera.so binary. Other files located
in those directories, most notably the meson build files, and various related
build scripts, may be covered by different licenses. None of their source code
is incorporated in the libcamera.so binary, they thus don't affect the
distribution terms of the binary.
The IPA modules, located in src/ipa/, are covered by free software licenses
chosen by the module authors. The LGPL-2.1-or-later license is recommended.
Those modules are compiled as separate binaries and dynamically loaded by the
libcamera core at runtime.
The IPA module API is defined in headers located in include/libcamera/ipa/ and
covered by the LGPL-2.1-or-later license. Using the data types (including
classes, structures and enumerations) and macros defined in the IPA module and
libcamera core API headers in IPA modules doesn't extend the LGPL license to
the IPA modules. Third-party closed-source IPA modules are thus permitted,
provided they comply with the licensing requirements of any software they
include or link to.
The libcamera Android camera HAL component is located in src/android/. The
libcamera-specific source code is covered by the LGPL-2.1-or-later license. The
component additionally contains header files and source code, located
respectively in include/android/ and src/android/metadata/, copied verbatim
from Android and covered by the Apache-2.0 license.
The libcamera GStreamer and V4L2 adaptation source code, located respectively
in src/gstreamer/ and src/v4l2/, is fully covered by the LGPL-2.1-or-later
license. Those components are compiled to separate binaries and do not
influence the license of the libcamera core.
The cam and qcam sample applications, as well as the unit tests, located
respectively in src/cam/, src/qcam/ and test/, are covered by the
GPL-2.0-or-later license. qcam additionally includes an icon set covered by the
MIT license. Those applications are compiled to separate binaries and do not
influence the license of the libcamera core.
Additional utilities are located in the utils/ directory and are covered by
various licenses. They are not part of the libcamera core and do not influence
its license.
Finally, copies of various Linux kernel headers are included in include/linux/
to avoid depending on particular versions of those headers being installed in
the system. The Linux kernel headers are covered by their respective license,
including the Linux kernel license syscall exception. Using a copy of those
headers doesn't affect libcamera licensing terms in any way compared to using
the same headers installed in the system from kernel headers packages provided
by Linux distributions.

View File

@ -0,0 +1,90 @@
# SPDX-License-Identifier: CC-BY-SA-4.0
# Doxyfile 1.9.5
PROJECT_NAME = "libcamera"
PROJECT_NUMBER = "@VERSION@"
PROJECT_BRIEF = "Supporting cameras in Linux since 2019"
OUTPUT_DIRECTORY = "@OUTPUT_DIR@"
STRIP_FROM_PATH = "@TOP_SRCDIR@"
ALIASES = "context=\xrefitem context \"Thread Safety\" \"Thread Safety\"" \
"threadbound=\ref thread-bound \"thread-bound\"" \
"threadsafe=\ref thread-safe \"thread-safe\""
EXTENSION_MAPPING = h=C++
TOC_INCLUDE_HEADINGS = 0
CASE_SENSE_NAMES = YES
QUIET = YES
WARN_AS_ERROR = @WARN_AS_ERROR@
INPUT = "@TOP_SRCDIR@/include/libcamera" \
"@TOP_SRCDIR@/src/ipa/ipu3" \
"@TOP_SRCDIR@/src/ipa/libipa" \
"@TOP_SRCDIR@/src/libcamera" \
"@TOP_BUILDDIR@/include/libcamera" \
"@TOP_BUILDDIR@/src/libcamera"
FILE_PATTERNS = *.c \
*.cpp \
*.h
RECURSIVE = YES
EXCLUDE = @TOP_SRCDIR@/include/libcamera/base/span.h \
@TOP_SRCDIR@/include/libcamera/internal/device_enumerator_sysfs.h \
@TOP_SRCDIR@/include/libcamera/internal/device_enumerator_udev.h \
@TOP_SRCDIR@/include/libcamera/internal/ipc_pipe_unixsocket.h \
@TOP_SRCDIR@/src/libcamera/device_enumerator_sysfs.cpp \
@TOP_SRCDIR@/src/libcamera/device_enumerator_udev.cpp \
@TOP_SRCDIR@/src/libcamera/ipc_pipe_unixsocket.cpp \
@TOP_SRCDIR@/src/libcamera/pipeline/ \
@TOP_SRCDIR@/src/libcamera/tracepoints.cpp \
@TOP_BUILDDIR@/include/libcamera/internal/tracepoints.h \
@TOP_BUILDDIR@/include/libcamera/ipa/soft_ipa_interface.h \
@TOP_BUILDDIR@/src/libcamera/proxy/
EXCLUDE_PATTERNS = @TOP_BUILDDIR@/include/libcamera/ipa/*_serializer.h \
@TOP_BUILDDIR@/include/libcamera/ipa/*_proxy.h \
@TOP_BUILDDIR@/include/libcamera/ipa/ipu3_*.h \
@TOP_BUILDDIR@/include/libcamera/ipa/raspberrypi_*.h \
@TOP_BUILDDIR@/include/libcamera/ipa/rkisp1_*.h \
@TOP_BUILDDIR@/include/libcamera/ipa/vimc_*.h
EXCLUDE_SYMBOLS = libcamera::BoundMethodArgs \
libcamera::BoundMethodBase \
libcamera::BoundMethodFunctor \
libcamera::BoundMethodMember \
libcamera::BoundMethodPack \
libcamera::BoundMethodPackBase \
libcamera::BoundMethodStatic \
libcamera::CameraManager::Private \
libcamera::SignalBase \
libcamera::ipa::AlgorithmFactoryBase \
*::details \
std::*
EXCLUDE_SYMLINKS = YES
HTML_OUTPUT = api-html
GENERATE_LATEX = NO
MACRO_EXPANSION = YES
EXPAND_ONLY_PREDEF = YES
INCLUDE_PATH = "@TOP_SRCDIR@/include/libcamera"
INCLUDE_FILE_PATTERNS = *.h
IMAGE_PATH = "@TOP_SRCDIR@/Documentation/images"
PREDEFINED = __DOXYGEN__ \
__cplusplus \
__attribute__(x)= \
@PREDEFINED@
HAVE_DOT = YES

View File

@ -0,0 +1,8 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
.. _api:
API
===
:: Placeholder for Doxygen documentation

File diff suppressed because it is too large Load Diff

After

Width:  |  Height:  |  Size: 194 KiB

View File

@ -0,0 +1,173 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
.. _camera-sensor-model:
.. todo: Move to Doxygen-generated documentation
The libcamera camera sensor model
=================================
libcamera defines an abstract camera sensor model in order to provide
a description of each of the processing steps that result in image data being
sent on the media bus and that form the image stream delivered to applications.
Applications should use the abstract camera sensor model defined here to
precisely control the operations of the camera sensor.
The libcamera camera sensor model targets image sensors producing frames in
RAW format, delivered through a MIPI CSI-2 compliant bus implementation.
The abstract sensor model maps libcamera components to the characteristics and
operations of an image sensor, and serves as a reference to model the libcamera
CameraSensor class and SensorConfiguration classes and operations.
In order to control the configuration of the camera sensor through the
SensorConfiguration class, applications should understand this model and map it
to the combination of image sensor and kernel driver in use.
The camera sensor model defined here is based on the *MIPI CCS specification*,
particularly on *Section 8.2 - Image readout* of *Chapter 8 - Video Timings*.
Glossary
--------
.. glossary::
Pixel array
The full grid of pixels, active and inactive ones
Pixel array active area
The portion(s) of the pixel array that contains valid and readable pixels;
corresponds to the libcamera properties::PixelArrayActiveAreas
Analog crop rectangle
The portion of the *pixel array active area* which is read out and passed
to further processing stages
Subsampling
Pixel processing techniques that reduce the image size by binning or by
skipping adjacent pixels
Digital crop
Crop of the sub-sampled image data before scaling
Frame output
The frame (image) as output on the media bus by the camera sensor
Camera sensor model
-------------------
The abstract sensor model is described in the following diagram.
.. figure:: sensor_model.svg
1. The sensor reads pixels from the *pixel array*. The pixels being read out are
selected by the *analog crop rectangle*.
2. The pixels can be subsampled to reduce the image size without affecting the
field of view. Two subsampling techniques can be used:
- Binning: combines adjacent pixels of the same colour by averaging or
summing their values, in the analog domain and/or the digital domain.
.. figure:: binning.svg
- Skipping: skips the read out of a number of adjacent pixels.
.. figure:: skipping.svg
3. The output of the optional sub-sampling stage is then cropped after the
conversion of the analogue pixel values in the digital domain.
4. The resulting output frame is sent on the media bus by the sensor.
Camera Sensor configuration parameters
--------------------------------------
The libcamera camera sensor model defines parameters that allow users to
control:
1. The image format bit depth
2. The size and position of the *Analog crop rectangle*
3. The subsampling factors used to downscale the pixel array readout data to a
smaller frame size without reducing the image *field of view*. Two
configuration parameters are made available to control the downscaling
factor:
- binning
A vertical and horizontal binning factor can be specified, the image
will be downscaled in its vertical and horizontal sizes by the specified
factor.
.. code-block:: c
:caption: Definition: The horizontal and vertical binning factors
horizontal_binning = xBin;
vertical_binning = yBin;
- skipping
Skipping reduces the image resolution by skipping the read-out of a number
of adjacent pixels. The skipping factor is specified by the 'increment'
number (number of pixels to 'skip') in the vertical and horizontal
directions and for even and odd rows and columns.
.. code-block:: c
:caption: Definition: The horizontal and vertical skipping factors
horizontal_skipping = (xOddInc + xEvenInc) / 2;
vertical_skipping = (yOddInc + yEvenInc) / 2;
Different sensors perform the binning and skipping stages in different
orders. For the sake of computing the final output image size the order of
execution is not relevant. The overall down-scaling factor is obtained by
combining the binning and skipping factors.
.. code-block:: c
:caption: Definition: The total scaling factor (binning + sub-sampling)
total_horizontal_downscale = horizontal_binning + horizontal_skipping;
total_vertical_downscale = vertical_binning + vertical_skipping;
4. The output size is used to specify any additional cropping on the sub-sampled
frame.
5. The total line length and frame height (*visible* pixels + *blankings*) as
sent on the MIPI CSI-2 bus.
6. The pixel transmission rate on the MIPI CSI-2 bus.
The above parameters are combined to obtain the following high-level
configurations:
- **frame output size**
Obtained by applying a crop to the physical pixel array size in the analog
domain, followed by optional binning and sub-sampling (in any order),
followed by an optional crop step in the output digital domain.
- **frame rate**
The combination of the *total frame size*, the image format *bit depth* and
the *pixel rate* of the data sent on the MIPI CSI-2 bus allows to compute the
image stream frame rate. The equation is the well known:
.. code-block:: c
frame_duration = total_frame_size / pixel_rate;
frame_rate = 1 / frame_duration;
where the *pixel_rate* parameter is the result of the sensor's configuration
of the MIPI CSI-2 bus *(the following formula applies to MIPI CSI-2 when
used on MIPI D-PHY physical protocol layer only)*
.. code-block:: c
pixel_rate = csi_2_link_freq * 2 * nr_of_lanes / bits_per_sample;

View File

@ -0,0 +1,94 @@
.. SPDX-License-Identifier: CC-BY-4.0
.. _code-of-conduct:
Contributor Covenant Code of Conduct
====================================
Our Pledge
----------
In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to make participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, sex characteristics, gender identity and expression,
level of experience, education, socio-economic status, nationality, personal
appearance, race, religion, or sexual identity and orientation.
Our Standards
-------------
Examples of behavior that contributes to creating a positive environment
include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
Our Responsibilities
--------------------
Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
Scope
-----
This Code of Conduct applies within all project spaces, and it also applies when
an individual is representing the project or its community in public spaces.
Examples of representing a project or community include using an official
project e-mail address, posting via an official social media account, or acting
as an appointed representative at an online or offline event. Representation of
a project may be further defined and clarified by project maintainers.
Enforcement
-----------
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at conduct@libcamera.org, or directly to
any member of the code of conduct team:
* Kieran Bingham <kieran.bingham@ideasonboard.com>
* Laurent Pinchart <laurent.pinchart@ideasonboard.com>
All complaints will be reviewed and investigated and will result in a response
that is deemed necessary and appropriate to the circumstances. The project team
is obligated to maintain confidentiality with regard to the reporter of an
incident. Further details of specific enforcement policies may be posted
separately.
Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.
Attribution
-----------
This Code of Conduct is adapted from the `Contributor Covenant`_, version 1.4,
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
.. _Contributor Covenant: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see
https://www.contributor-covenant.org/faq

View File

@ -0,0 +1,429 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
.. _coding-style-guidelines:
Coding Style Guidelines
=======================
These coding guidelines are meant to ensure code quality. As a contributor
you are expected to follow them in all code submitted to the project. While
strict compliance is desired, exceptions are tolerated when justified with
good reasons. Please read the whole coding guidelines and use common sense
to decide when departing from them is appropriate.
libcamera is written in C++, a language that has seen many revisions and
offers an extensive set of features that are easy to abuse. These coding
guidelines establish the subset of C++ used by the project.
Coding Style
------------
Even if the programming language in use is different, the project embraces the
`Linux Kernel Coding Style`_ with a few exceptions and some C++ specificities.
.. _Linux Kernel Coding Style: https://www.kernel.org/doc/html/latest/process/coding-style.html
In particular, from the kernel style document, the following sections are adopted:
* 1 "Indentation"
* 2 "Breaking Long Lines" striving to fit code within 80 columns and
accepting up to 120 columns when necessary
* 3 "Placing Braces and Spaces"
* 3.1 "Spaces"
* 8 "Commenting" with the exception that in-function comments are not
always un-welcome.
While libcamera uses the kernel coding style for all typographic matters, the
project is a user space library, developed in a different programming language,
and the kernel guidelines fall short for this use case.
For this reason, rules and guidelines from the `Google C++ Style Guide`_ have
been adopted as well as most coding principles specified therein, with a
few exceptions and relaxed limitations on some subjects.
.. _Google C++ Style Guide: https://google.github.io/styleguide/cppguide.html
The following exceptions apply to the naming conventions specified in the
document:
* File names: libcamera uses the .cpp extensions for C++ source files and
the .h extension for header files
* Variables, function parameters, function names and class members use
camel case style, with the first letter in lower-case (as in 'camelCase'
and not 'CamelCase')
* Types (classes, structs, type aliases, and type template parameters) use
camel case, with the first letter in capital case (as in 'CamelCase' and
not 'camelCase')
* Enum members use 'CamelCase', while macros are in capital case with
underscores in between
* All formatting rules specified in the selected sections of the Linux kernel
Code Style for indentation, braces, spacing, etc
* Headers are guarded by the use of '#pragma once'
Order of Includes
~~~~~~~~~~~~~~~~~
Headers shall be included at the beginning of .c, .cpp and .h files, right
after the file description comment block and, for .h files, the header guard
macro. For .cpp files, if the file implements an API declared in a header file,
that header file shall be included first in order to ensure it is
self-contained.
While the following list is extensive, it documents the expected behaviour
defined by the clang-format configuration and tooling should assist with
ordering.
The headers shall be grouped and ordered as follows:
1. The header declaring the API being implemented (if any)
2. The C and C++ system and standard library headers
3. Linux kernel headers
4. The libcamera base private header if required
5. The libcamera base library headers
6. The libcamera public API headers
7. The libcamera IPA interfaces
8. The internal libcamera headers
9. Other libraries' headers, with one group per library
10. Local headers grouped by subdirectory
11. Any local headers
Groups of headers shall be separated by a single blank line. Headers within
each group shall be sorted alphabetically.
System and library headers shall be included with angle brackets. Project
headers shall be included with angle brackets for the libcamera public API
headers, and with double quotes for internal libcamera headers.
C++ Specific Rules
------------------
The code shall be implemented in C++17, with the following caveats:
* Type inference (auto and decltype) shall be used with caution, to avoid
drifting towards an untyped language.
* The explicit, override and final specifiers are to be used where applicable.
* Smart pointers, as well as shared pointers and weak pointers, shall not be
overused.
* Classes are encouraged to define move constructors and assignment operators
where applicable, and generally make use of the features offered by rvalue
references.
Object Ownership
~~~~~~~~~~~~~~~~
libcamera creates and destroys many objects at runtime, for both objects
internal to the library and objects exposed to the user. To guarantee proper
operation without use after free, double free or memory leaks, knowing who owns
each object at any time is crucial. The project has enacted a set of rules to
make object ownership tracking as explicit and fool-proof as possible.
In the context of this section, the terms object and instance are used
interchangeably and both refer to an instance of a class. The term reference
refers to both C++ references and C++ pointers in their capacity to refer to an
object. Passing a reference means offering a way to a callee to obtain a
reference to an object that the caller has a valid reference to. Borrowing a
reference means using a reference passed by a caller without ownership transfer
based on the assumption that the caller guarantees the validity of the
reference for the duration of the operation that borrows it.
1. Single Owner Objects
* By default an object has a single owner at any time.
* Storage of single owner objects varies depending on how the object
ownership will evolve through the lifetime of the object.
* Objects whose ownership needs to be transferred shall be stored as
std::unique_ptr<> as much as possible to emphasize the single ownership.
* Objects whose owner doesn't change may be embedded in other objects, or
stored as pointer or references. They may be stored as std::unique_ptr<>
for automatic deletion if desired.
* Ownership is transferred by passing the reference as a std::unique_ptr<>
and using std::move(). After ownership transfer the former owner has no
valid reference to the object anymore and shall not access it without first
obtaining a valid reference.
* Objects may be borrowed by passing an object reference from the owner to
the borrower, providing that
* the owner guarantees the validity of the reference for the whole duration
of the borrowing, and
* the borrower doesn't access the reference after the end of the borrowing.
When borrowing from caller to callee for the duration of a function call,
this implies that the callee shall not keep any stored reference after it
returns. These rules apply to the callee and all the functions it calls,
directly or indirectly.
When the object is stored in a std::unique_ptr<>, borrowing passes a
reference to the object, not to the std::unique_ptr<>, as
* a 'const &' when the object doesn't need to be modified and may not be
null.
* a pointer when the object may be modified or may be null. Unless
otherwise specified, pointers passed to functions are considered as
borrowed references valid for the duration of the function only.
2. Shared Objects
* Objects that may have multiple owners at a given time are called shared
objects. They are reference-counted and live as long as any references to
the object exist.
* Shared objects are created with std::make_shared<> or
std::allocate_shared<> and stored in an std::shared_ptr<>.
* Ownership is shared by creating and passing copies of any valid
std::shared_ptr<>. Ownership is released by destroying the corresponding
std::shared_ptr<>.
* When passed to a function, std::shared_ptr<> are always passed by value,
never by reference. The caller can decide whether to transfer its ownership
of the std::shared_ptr<> with std::move() or retain it. The callee shall
use std::move() if it needs to store the shared pointer.
* Do not over-use std::move(), as it may prevent copy-elision. In particular
a function returning a std::shared_ptr<> value shall not use std::move() in
its return statements, and its callers shall not wrap the function call
with std::move().
* Borrowed references to shared objects are passed as references to the
objects themselves, not to the std::shared_ptr<>, with the same rules as
for single owner objects.
These rules match the `object ownership rules from the Chromium C++ Style Guide`_.
.. _object ownership rules from the Chromium C++ Style Guide: https://chromium.googlesource.com/chromium/src/+/master/styleguide/c++/c++.md#object-ownership-and-calling-conventions
.. attention:: Long term borrowing of single owner objects is allowed. Example
use cases are implementation of the singleton pattern (where the singleton
guarantees the validity of the reference forever), or returning references
to global objects whose lifetime matches the lifetime of the application. As
long term borrowing isn't marked through language constructs, it shall be
documented explicitly in details in the API.
Global Variables
~~~~~~~~~~~~~~~~
The order of initializations and destructions of global variables cannot be
reasonably controlled. This can cause problems (including segfaults) when global
variables depend on each other, directly or indirectly. For example, if the
declaration of a global variable calls a constructor which uses another global
variable that hasn't been initialized yet, incorrect behavior is likely.
Similar issues may occur when the library is unloaded and global variables are
destroyed.
Global variables that are statically initialized and have trivial destructors
(such as an integer constant) do not cause any issue. Other global variables
shall be avoided when possible, but are allowed when required (for instance to
implement factories with auto-registration). They shall not depend on any other
global variable, should run a minimal amount of code in the constructor and
destructor, and code that contains dependencies should be moved to a later
point in time.
Error Handling
~~~~~~~~~~~~~~
Proper error handling is crucial to the stability of libcamera. The project
follows a set of high-level rules:
* Make errors impossible through API design. The best way to handle errors is
to prevent them from happening in the first place. The preferred option is
thus to prevent error conditions at the API design stage when possible.
* Detect errors at compile time. Compile-time checking of errors not only
reduces the runtime complexity, but also ensures that errors are caught early
on during development instead of during testing or, worse, in production. The
static_assert() declaration should be used where possible for this purpose.
* Validate all external API contracts. Explicit pre-condition checks shall be
used to validate API contracts. Whenever possible, appropriate errors should
be returned directly. As libcamera doesn't use exceptions, errors detected in
constructors shall result in the constructed object being marked as invalid,
with a public member function available to check validity. The checks should
be thorough for the public API, and may be lighter for internal APIs when
pre-conditions can reasonably be considered to be met through other means.
* Use assertions for fatal issues only. The ASSERT() macro causes a program
abort when compiled in debug mode, and is a no-op otherwise. It is useful to
abort execution synchronously with the error check instead of letting the
error cause problems (such as segmentation faults) later, and to provide a
detailed backtrace. Assertions shall only be used to catch conditions that are
never supposed to happen without a serious bug in libcamera that would prevent
safe recovery. They shall never be used to validate API contracts. The
assertion conditions shall not cause any side effect as they are compiled out
in non-debug mode.
C Compatibility Headers
~~~~~~~~~~~~~~~~~~~~~~~
The C++ standard defines a set of C++ standard library headers, and for some of
them, defines C compatibility headers. The former have a name of the form
<cxxx> while the latter are named <xxx.h>. The C++ headers declare names in the
std namespace, and may declare the same names in the global namespace. The C
compatibility headers declare names in the global namespace, and may declare
the same names in the std namespace. Code shall not rely on the optional
declaration of names in the global or std namespace.
Usage of the C compatibility headers is preferred, except for the math.h header.
Where math.h defines separate functions for different argument types (e.g.
abs(int), labs(long int), fabs(double) and fabsf(float)) and requires the
developer to pick the right function, cmath defines overloaded functions
(std::abs(int), std::abs(long int), std::abs(double) and std::abs(float)) to let
the compiler select the right function. This avoids potential errors such as
calling abs(int) with a float argument, performing an unwanted implicit integer
conversion. For this reason, cmath is preferred over math.h.
Documentation
-------------
All public and protected classes, structures, enumerations, macros, functions
and variables shall be documented with a Doxygen comment block, using the
Javadoc style with C-style comments. When documenting private member functions
and variables the same Doxygen style shall be used as for public and protected
members.
Documentation relates to header files, but shall be stored in the .cpp source
files in order to group the implementation and documentation. Every documented
header file shall have a \file documentation block in the .cpp source file.
The following comment block shows an example of correct documentation for a
member function of the PipelineHandler class.
::
/**
* \fn PipelineHandler::start()
* \brief Start capturing from a group of streams
* \param[in] camera The camera to start
*
* Start the group of streams that have been configured for capture by
* \a configureStreams(). The intended caller of this function is the Camera
* class which will in turn be called from the application to indicate that
* it has configured the streams and is ready to capture.
*
* \return 0 on success or a negative error code otherwise
*/
The comment block shall be placed right before the function it documents. If
the function is defined inline in the class definition in the header file, the
comment block shall be placed alone in the .cpp source file in the same order
as the function definitions in the header file and shall start with an \fn
line. Otherwise no \fn line shall be present.
The \brief directive shall be present. If the function takes parameters, \param
directives shall be present, with the appropriate [in], [out] or [inout]
specifiers. Only when the direction of the parameters isn't known (for instance
when defining a template function with variadic arguments) the direction
specifier shall be omitted. The \return directive shall be present when the
function returns a value, and shall be omitted otherwise.
The long description is optional. When present it shall be surrounded by empty
lines and may span multiple paragraphs. No blank lines shall otherwise be added
between the \fn, \brief, \param and \return directives.
Tools
-----
The 'clang-format' code formatting tool can be used to reformat source files
with the libcamera coding style, defined in the .clang-format file at the root
of the source tree.
As clang-format is a code formatter, it operates on full files and outputs
reformatted source code. While it can be used to reformat code before sending
patches, it may generate unrelated changes. To avoid this, libcamera provides a
'checkstyle.py' script wrapping the formatting tools to only retain related
changes. This should be used to validate modifications before submitting them
for review.
The script operates on one or multiple git commits specified on the command
line. It does not modify the git tree, the index or the working directory and
is thus safe to run at any point.
Commits are specified using the same revision range syntax as 'git log'. The
most usual use cases are to specify a single commit by sha1, branch name or tag
name, or a commit range with the <from>..<to> syntax. When no arguments are
given, the topmost commit of the current branch is selected.
::
$ ./utils/checkstyle.py cc7d204b2c51
----------------------------------------------------------------------------------
cc7d204b2c51853f7d963d144f5944e209e7ea29 libcamera: Use the logger instead of cout
----------------------------------------------------------------------------------
No style issue detected
When operating on a range of commits, style checks are performed on each commit
from oldest to newest.
::
$ ../utils/checkstyle.py 3b56ddaa96fb~3..3b56ddaa96fb
----------------------------------------------------------------------------------
b4351e1a6b83a9cfbfc331af3753602a02dbe062 libcamera: log: Fix Doxygen documentation
----------------------------------------------------------------------------------
No style issue detected
--------------------------------------------------------------------------------------
6ab3ff4501fcfa24db40fcccbce35bdded7cd4bc libcamera: log: Document the LogMessage class
--------------------------------------------------------------------------------------
No style issue detected
---------------------------------------------------------------------------------
3b56ddaa96fbccf4eada05d378ddaa1cb6209b57 build: Add 'std=c++11' cpp compiler flag
---------------------------------------------------------------------------------
Commit doesn't touch source files, skipping
Commits that do not touch any .c, .cpp or .h files are skipped.
::
$ ./utils/checkstyle.py edbd2059d8a4
----------------------------------------------------------------------
edbd2059d8a4bd759302ada4368fa4055638fd7f libcamera: Add initial logger
----------------------------------------------------------------------
--- src/libcamera/include/log.h
+++ src/libcamera/include/log.h
@@ -21,11 +21,14 @@
{
public:
LogMessage(const char *fileName, unsigned int line,
- LogSeverity severity);
- LogMessage(const LogMessage&) = delete;
+ LogSeverity severity);
+ LogMessage(const LogMessage &) = delete;
~LogMessage();
- std::ostream& stream() { return msgStream; }
+ std::ostream &stream()
+ {
+ return msgStream;
+ }
private:
std::ostringstream msgStream;
--- src/libcamera/log.cpp
+++ src/libcamera/log.cpp
@@ -42,7 +42,7 @@
static const char *log_severity_name(LogSeverity severity)
{
- static const char * const names[] = {
+ static const char *const names[] = {
"INFO",
"WARN",
" ERR",
---
2 potential style issues detected, please review
When potential style issues are detected, they are displayed in the form of a
diff that fixes the issues, on top of the corresponding commit. As the script is
in early development, false positives are expected. The flagged issues should be
reviewed, but the diff doesn't need to be applied blindly.
Execution of checkstyle.py can be automated through git commit hooks. Example
of pre-commit and post-commit hooks are available in `utils/hooks/pre-commit`
and `utils/hooks/post-commit`. You can install either hook by copying it to
`.git/hooks/`. The post-commit hook is easier to start with as it will only flag
potential issues after committing, while the pre-commit hook will abort the
commit if issues are detected and requires usage of `git commit --no-verify` to
ignore false positives.
Happy hacking, libcamera awaits your patches!

View File

@ -0,0 +1,172 @@
# SPDX-License-Identifier: CC-BY-SA-4.0
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'libcamera'
copyright = '2018-2019, The libcamera documentation authors'
author = u'Kieran Bingham, Jacopo Mondi, Laurent Pinchart, Niklas Söderlund'
# Version information is provided by the build environment, through the
# sphinx command line.
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# No extensions are enabled: the documentation is plain reStructuredText.
extensions = [
]
# Add any paths that contain templates here, relative to this directory.
templates_path = []
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'en'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = None
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# 'theme' is the custom in-tree libcamera theme, located next to this file
# (hence html_theme_path pointing at the current directory).
html_theme = 'theme'
html_theme_path = ['.']
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'libcameradoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'libcamera.tex', 'libcamera Documentation',
     author, 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'libcamera', 'libcamera Documentation',
     [author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'libcamera', 'libcamera Documentation',
     author, 'libcamera', 'One line description of project.',
     'Miscellaneous'),
]
# -- Options for Epub output -------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']

View File

@ -0,0 +1,142 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
Contributing
============
libcamera is developed as a free software project and welcomes contributors.
Whether you would like to help with coding, documentation, testing, proposing
new features, or just discussing the project with the community, you can join
our official public communication channels, or simply check out the code.
The project adheres to a :ref:`code of conduct <code-of-conduct>` that
maintainers, contributors and community members are expected to follow in all
online and offline communication.
Mailing List
------------
We use a public mailing list as our main means of communication. You can find
subscription information and the messages archive on the `libcamera-devel`_
list information page.
.. _libcamera-devel: https://lists.libcamera.org/listinfo/libcamera-devel
IRC Channel
-----------
For informal and real time discussions, our IRC channel on irc.oftc.net is open
to the public. Point your IRC client to #libcamera to say hello, or use the
`WebChat`_.
.. _WebChat: https://webchat.oftc.net/?channels=libcamera
Source Code
-----------
libcamera is in early stages of development, and no releases are available yet.
The source code is available from the project's `git tree`_.
.. code-block:: shell
$ git clone https://git.libcamera.org/libcamera/libcamera.git
.. _git tree: https://git.libcamera.org/libcamera/libcamera.git/
A mirror is also hosted on `LinuxTV`_.
.. _LinuxTV: https://git.linuxtv.org/libcamera.git/
Issue Tracker
-------------
Our `issue tracker`_ tracks all bugs, issues and feature requests. All issues
are publicly visible, and you can register for an account to create new issues.
.. _issue tracker: https://bugs.libcamera.org/
Documentation
-------------
Project documentation is created using `Sphinx`_. Source level documentation
uses `Doxygen`_. Please make sure to document all code during development.
.. _Sphinx: https://www.sphinx-doc.org
.. _Doxygen: https://www.doxygen.nl
Submitting Patches
------------------
The libcamera project has high standards of stability, efficiency and
reliability. To achieve those, the project goes to great length to produce
code that is as easy to read, understand and maintain as possible. This is
made possible by a set of :ref:`coding-style-guidelines` that all submissions
are expected to follow.
We also care about the quality of commit messages. A good commit message not
only describes what a commit does, but why it does so. By conveying clear
information about the purpose of the commit, it helps speeding up reviews.
Regardless of whether you're new to git or have years of experience,
https://cbea.ms/git-commit/ is always a good guide to read to improve your
commit message writing skills.
The patch submission process for libcamera is similar to the Linux kernel, and
goes through the `libcamera-devel`_ mailing list. If you have no previous
experience with ``git-send-email``, or just experience trouble configuring it
for your e-mail provider, the sourcehut developers have put together a detailed
guide available at https://git-send-email.io/.
Patches submitted to the libcamera project must be certified as suitable for
integration into an open source project. As such libcamera follows the same
model as utilised by the Linux kernel, and requires the use of 'Signed-off-by:'
tags in all patches.
By signing your contributions you are certifying your work in accordance with
the following:
`Developer's Certificate of Origin`_
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Version 1.1
Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
1 Letterman Drive
Suite D4700
San Francisco, CA, 94129
Everyone is permitted to copy and distribute verbatim copies of this
license document, but changing it is not allowed.
Developer's Certificate of Origin 1.1
By making a contribution to this project, I certify that:
(a) The contribution was created in whole or in part by me and I
have the right to submit it under the open source license
indicated in the file; or
(b) The contribution is based upon previous work that, to the best
of my knowledge, is covered under an appropriate open source
license and I have the right under that license to submit that
work with modifications, whether created in whole or in part
by me, under the same open source license (unless I am
permitted to submit under a different license), as indicated
in the file; or
(c) The contribution was provided directly to me by some other
person who certified (a), (b) or (c) and I have not modified
it.
(d) I understand and agree that this project and the contribution
are public and that a record of the contribution (including all
personal information I submit with it, including my sign-off) is
maintained indefinitely and may be redistributed consistent with
this project or the open source license(s) involved.
.. _Developer's Certificate of Origin: https://developercertificate.org/
.. toctree::
:hidden:
Code of Conduct <code-of-conduct>
Coding Style <coding-style>

View File

@ -0,0 +1,400 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
.. contents::
:local:
*************
Documentation
*************
.. toctree::
:hidden:
API <api-html/index>
API
===
The libcamera API is extensively documented using Doxygen. The :ref:`API
nightly build <api>` contains the most up-to-date API documentation, built from
the latest master branch.
Feature Requirements
====================
Device enumeration
------------------
The library shall support enumerating all camera devices available in the
system, including both fixed cameras and hotpluggable cameras. It shall
support cameras plugged and unplugged after the initialization of the
library, and shall offer a mechanism to notify applications of camera plug
and unplug.
The following types of cameras shall be supported:
* Internal cameras designed for point-and-shoot still image and video
capture usage, either controlled directly by the CPU, or exposed through
an internal USB bus as a UVC device.
* External UVC cameras designed for video conferencing usage.
Other types of camera, including analog cameras, depth cameras, thermal
cameras, external digital picture or movie cameras, are out of scope for
this project.
A hardware device that includes independent camera sensors, such as front
and back sensors in a phone, shall be considered as multiple camera devices
for the purpose of this library.
Independent Camera Devices
--------------------------
When multiple cameras are present in the system and are able to operate
independently from each other, the library shall expose them as multiple
camera devices and support parallel operation without any additional usage
restriction apart from the limitations inherent to the hardware (such as
memory bandwidth, CPU usage or number of CSI-2 receivers for instance).
Independent processes shall be able to use independent camera devices
without interfering with each other. A single camera device shall be
usable by a single process at a time.
Multiple streams support
------------------------
The library shall support multiple video streams running in parallel
for each camera device, within the limits imposed by the system.
Per frame controls
------------------
The library shall support controlling capture parameters for each stream
on a per-frame basis, on a best effort basis based on the capabilities of the
hardware and underlying software stack (including kernel drivers and
firmware). It shall apply capture parameters to the frame they target, and
report the value of the parameters that have effectively been used for each
captured frame.
When a camera device supports multiple streams, the library shall allow both
control of each stream independently, and control of multiple streams
together. Streams that are controlled together shall be synchronized. No
synchronization is required for streams controlled independently.
Capability Enumeration
----------------------
The library shall expose capabilities of each camera device in a way that
allows applications to discover those capabilities dynamically. Applications
shall be allowed to cache capabilities for as long as they are using the
library. If capabilities can change at runtime, the library shall offer a
mechanism to notify applications of such changes. Applications shall not
cache capabilities in long term storage between runs.
Capabilities shall be discovered dynamically at runtime from the device when
possible, and may come, in part or in full, from platform configuration
data.
Device Profiles
---------------
The library may define different camera device profiles, each with a minimum
set of required capabilities. Applications may use those profiles to quickly
determine the level of features exposed by a device without parsing the full
list of capabilities. Camera devices may implement additional capabilities
on top of the minimum required set for the profile they expose.
3A and Image Enhancement Algorithms
-----------------------------------
The camera devices shall implement auto exposure, auto gain and auto white
balance. Camera devices that include a focus lens shall implement auto
focus. Additional image enhancement algorithms, such as noise reduction or
video stabilization, may be implemented.
All algorithms may be implemented in hardware or firmware outside of the
library, or in software in the library. They shall all be controllable by
applications.
The library shall be architectured to isolate the 3A and image enhancement
algorithms in a component with a documented API, respectively called the 3A
component and the 3A API. The 3A API shall be stable, and shall allow both
open-source and closed-source implementations of the 3A component.
The library may include statically-linked open-source 3A components, and
shall support dynamically-linked open-source and closed-source 3A
components.
Closed-source 3A Component Sandboxing
-------------------------------------
For security purposes, it may be desired to run closed-source 3A components
in a separate process. The 3A API would in such a case be transported over
IPC. The 3A API shall make it possible to use any IPC mechanism that
supports passing file descriptors.
The library may implement an IPC mechanism, and shall support third-party
platform-specific IPC mechanisms through the implementation of a
platform-specific 3A API wrapper. No modification to the library shall be
needed to use such third-party IPC mechanisms.
The 3A component shall not directly access any device node on the system.
Such accesses shall instead be performed through the 3A API. The library
shall validate all accesses and restrict them to what is absolutely required
by 3A components.
V4L2 Compatibility Layer
------------------------
The project shall support traditional V4L2 application through an additional
libcamera wrapper library. The wrapper library shall trap all accesses to
camera devices through `LD_PRELOAD`, and route them through libcamera to
emulate a high-level V4L2 camera device. It shall expose camera device
features on a best-effort basis, and aim for the level of features
traditionally available from a UVC camera designed for video conferencing.
Android Camera HAL v3 Compatibility
-----------------------------------
The library API shall expose all the features required to implement an
Android Camera HAL v3 on top of libcamera. Some features of the HAL may be
omitted as long as they can be implemented separately in the HAL, such as
JPEG encoding, or YUV reprocessing.
Camera Stack
============
::
a c / +-------------+ +-------------+ +-------------+ +-------------+
p a | | Native | | Framework | | Native | | Android |
p t | | V4L2 | | Application | | libcamera | | Camera |
l i | | Application | | (gstreamer) | | Application | | Framework |
i o \ +-------------+ +-------------+ +-------------+ +-------------+
n ^ ^ ^ ^
| | | |
l a | | | |
i d v v | v
b a / +-------------+ +-------------+ | +-------------+
c p | | V4L2 | | Camera | | | Android |
a t | | Compat. | | Framework | | | Camera |
m a | | | | (gstreamer) | | | HAL |
e t \ +-------------+ +-------------+ | +-------------+
r i ^ ^ | ^
a o | | | |
n | | | |
/ | ,................................................
| | ! : Language : !
l f | | ! : Bindings : !
i r | | ! : (optional) : !
b a | | \...............................................'
c m | | | | |
a e | | | | |
m w | v v v v
e o | +----------------------------------------------------------------+
r r | | |
a k | | libcamera |
| | |
\ +----------------------------------------------------------------+
^ ^ ^
Userspace | | |
------------------------ | ---------------- | ---------------- | ---------------
Kernel | | |
v v v
+-----------+ +-----------+ +-----------+
| Media | <--> | Video | <--> | V4L2 |
| Device | | Device | | Subdev |
+-----------+ +-----------+ +-----------+
The camera stack comprises four software layers. From bottom to top:
* The kernel drivers control the camera hardware and expose a
low-level interface to userspace through the Linux kernel V4L2
family of APIs (Media Controller API, V4L2 Video Device API and
V4L2 Subdev API).
* The libcamera framework is the core part of the stack. It
handles all control of the camera devices in its core component,
libcamera, and exposes a native C++ API to upper layers. Optional
language bindings allow interfacing to libcamera from other
programming languages.
Those components live in the same source code repository and
all together constitute the libcamera framework.
* The libcamera adaptation is an umbrella term designating the
components that interface to libcamera in other frameworks.
Notable examples are a V4L2 compatibility layer, a gstreamer
libcamera element, and an Android camera HAL implementation based
on libcamera.
Those components can live in the libcamera project source code
in separate repositories, or move to their respective project's
repository (for instance the gstreamer libcamera element).
* The applications and upper level frameworks are based on the
libcamera framework or libcamera adaptation, and are outside of
the scope of the libcamera project.
libcamera Architecture
======================
::
---------------------------< libcamera Public API >---------------------------
^ ^
| |
v v
+-------------+ +-------------------------------------------------+
| Camera | | Camera Device |
| Devices | | +---------------------------------------------+ |
| Manager | | | Device-Agnostic | |
+-------------+ | | | |
^ | | +------------------------+ |
| | | | ~~~~~~~~~~~~~~~~~~~~~ |
| | | | { +---------------+ } |
| | | | } | ////Image//// | { |
| | | | <-> | /Processing// | } |
| | | | } | /Algorithms// | { |
| | | | { +---------------+ } |
| | | | ~~~~~~~~~~~~~~~~~~~~~ |
| | | | ======================== |
| | | | +---------------+ |
| | | | | //Pipeline/// | |
| | | | <-> | ///Handler/// | |
| | | | | ///////////// | |
| | +--------------------+ +---------------+ |
| | Device-Specific |
| +-------------------------------------------------+
| ^ ^
| | |
v v v
+--------------------------------------------------------------------+
| Helpers and Support Classes |
| +-------------+ +-------------+ +-------------+ +-------------+ |
| | MC & V4L2 | | Buffers | | Sandboxing | | Plugins | |
| | Support | | Allocator | | IPC | | Manager | |
| +-------------+ +-------------+ +-------------+ +-------------+ |
| +-------------+ +-------------+ |
| | Pipeline | | ... | |
| | Runner | | | |
| +-------------+ +-------------+ |
+--------------------------------------------------------------------+
/// Device-Specific Components
~~~ Sandboxing
While offering a unified API towards upper layers, and presenting
itself as a single library, libcamera isn't monolithic. It exposes
multiple components through its public API, is built around a set of
separate helpers internally, uses device-specific components and can
load dynamic plugins.
Camera Devices Manager
The Camera Devices Manager provides a view of available cameras
in the system. It performs cold enumeration and runtime camera
management, and supports a hotplug notification mechanism in its
public API.
To avoid the cost associated with cold enumeration of all devices
at application start, and to arbitrate concurrent access to camera
devices, the Camera Devices Manager could later be split to a
separate service, possibly with integration in platform-specific
device management.
Camera Device
The Camera Device represents a camera device to upper layers. It
exposes full control of the device through the public API, and is
thus the highest level object exposed by libcamera.
Camera Device instances are created by the Camera Devices
Manager. An optional function to create new instances could be exposed
through the public API to speed up initialization when the upper
layer knows how to directly address camera devices present in the
system.
Pipeline Handler
The Pipeline Handler manages complex pipelines exposed by the kernel drivers
through the Media Controller and V4L2 APIs. It abstracts pipeline handling to
hide device-specific details to the rest of the library, and implements both
pipeline configuration based on stream configuration, and pipeline runtime
execution and scheduling when needed by the device.
This component is device-specific and is part of the libcamera code base. As
such it is covered by the same free software license as the rest of libcamera
and needs to be contributed upstream by device vendors. The Pipeline Handler
lives in the same process as the rest of the library, and has access to all
helpers and kernel camera-related devices.
Image Processing Algorithms
Together with the hardware image processing and hardware statistics
collection, the Image Processing Algorithms implement 3A (Auto-Exposure,
Auto-White Balance and Auto-Focus) and other algorithms. They run on the CPU
and interact with the kernel camera devices to control hardware image
processing based on the parameters supplied by upper layers, closing the
control loop of the ISP.
This component is device-specific and is loaded as an external plugin. It can
be part of the libcamera code base, in which case it is covered by the same
license, or provided externally as an open-source or closed-source component.
The component is sandboxed and can only interact with libcamera through
internal APIs specifically marked as such. In particular it will have no
direct access to kernel camera devices, and all its accesses to image and
metadata will be mediated by dmabuf instances explicitly passed to the
component. The component must be prepared to run in a process separate from
the main libcamera process, and to have a very restricted view of the system,
including no access to networking APIs and limited access to file systems.
The sandboxing mechanism isn't defined by libcamera. One example
implementation will be provided as part of the project, and platforms vendors
will be able to provide their own sandboxing mechanism as a plugin.
libcamera should provide a basic implementation of Image Processing
Algorithms, to serve as a reference for the internal API. Device vendors are
expected to provide a full-fledged implementation compatible with their
Pipeline Handler. One goal of the libcamera project is to create an
environment in which the community will be able to compete with the
closed-source vendor binaries and develop a high quality open source
implementation.
Helpers and Support Classes
While Pipeline Handlers are device-specific, implementations are expected to
share code due to usage of identical APIs towards the kernel camera drivers
and the Image Processing Algorithms. This includes without limitation handling
of the MC and V4L2 APIs, buffer management through dmabuf, and pipeline
discovery, configuration and scheduling. Such code will be factored out to
helpers when applicable.
Other parts of libcamera will also benefit from factoring code out to
self-contained support classes, even if such code is present only once in the
code base, in order to keep the source code clean and easy to read. This
should be the case for instance for plugin management.
V4L2 Compatibility Layer
------------------------
V4L2 compatibility is achieved through a shared library that traps all
accesses to camera devices and routes them to libcamera to emulate high-level
V4L2 camera devices. It is injected in a process address space through
`LD_PRELOAD` and is completely transparent for applications.
The compatibility layer exposes camera device features on a best-effort basis,
and aims for the level of features traditionally available from a UVC camera
designed for video conferencing.
Android Camera HAL
------------------
Camera support for Android is achieved through a generic Android
camera HAL implementation on top of libcamera. The HAL will implement internally
features required by Android and missing from libcamera, such as JPEG encoding
support.
The Android camera HAL implementation will initially target the
LIMITED hardware level, with support for the FULL level then being gradually
implemented.

View File

@ -0,0 +1,164 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
Environment variables
=====================
The libcamera behaviour can be tuned through environment variables. This
document lists all the available variables and describes their usage.
List of variables
-----------------
LIBCAMERA_LOG_FILE
The custom destination for log output.
Example value: ``/home/{user}/camera_log.log``
LIBCAMERA_LOG_LEVELS
Configure the verbosity of log messages for different categories (`more <Log levels_>`__).
Example value: ``*:DEBUG``
LIBCAMERA_LOG_NO_COLOR
Disable coloring of log messages (`more <Notes about debugging_>`__).
LIBCAMERA_IPA_CONFIG_PATH
Define custom search locations for IPA configurations (`more <IPA configuration_>`__).
Example value: ``${HOME}/.libcamera/share/ipa:/opt/libcamera/vendor/share/ipa``
LIBCAMERA_IPA_FORCE_ISOLATION
When set to a non-empty string, force process isolation of all IPA modules.
Example value: ``1``
LIBCAMERA_IPA_MODULE_PATH
Define custom search locations for IPA modules (`more <IPA module_>`__).
Example value: ``${HOME}/.libcamera/lib:/opt/libcamera/vendor/lib``
LIBCAMERA_PIPELINES_MATCH_LIST
Define an ordered list of pipeline names to be used to match the media
devices in the system. The pipeline handler names used to populate the
variable are the ones passed to the REGISTER_PIPELINE_HANDLER() macro in the
source code.
Example value: ``rkisp1,simple``
LIBCAMERA_RPI_CONFIG_FILE
Define a custom configuration file to use in the Raspberry Pi pipeline handler.
Example value: ``/usr/local/share/libcamera/pipeline/rpi/vc4/minimal_mem.yaml``
Further details
---------------
Notes about debugging
~~~~~~~~~~~~~~~~~~~~~
The environment variables ``LIBCAMERA_LOG_FILE``, ``LIBCAMERA_LOG_LEVELS`` and
``LIBCAMERA_LOG_NO_COLOR`` are used to modify the default configuration of the
libcamera logger.
By default, libcamera logs all messages to the standard error (std::cerr).
Messages are colored by default depending on the log level. Coloring can be
disabled by setting the ``LIBCAMERA_LOG_NO_COLOR`` environment variable.
The default log destination can also be directed to a file by setting the
``LIBCAMERA_LOG_FILE`` environment variable to the log file name. This also
disables coloring.
Log levels are controlled through the ``LIBCAMERA_LOG_LEVELS`` variable, which
accepts a comma-separated list of 'category:level' pairs.
The `level <Log levels_>`__ part is mandatory and can either be specified by
name or by numerical index associated with each level.
The optional `category <Log categories_>`__ is a string matching the categories
defined by each file in the source base using the logging infrastructure. It
can include a wildcard ('*') character at the end to match multiple categories.
For more information refer to the `API documentation <https://libcamera.org/api-html/log_8h.html#details>`__.
Examples:
Enable full debug output to a separate file, for every `category <Log categories_>`__
within a local environment:
.. code:: bash
:~$ LIBCAMERA_LOG_FILE='/tmp/example_log.log' \
LIBCAMERA_LOG_LEVELS=0 \
cam --list
Enable full debug output for the categories ``Camera`` and ``V4L2`` within a
global environment:
.. code:: bash
:~$ export LIBCAMERA_LOG_LEVELS='Camera:DEBUG,V4L2:DEBUG'
:~$ cam --list
Log levels
~~~~~~~~~~
This is the list of available log levels, notice that all levels below
the chosen one are printed, while those above are discarded.
- DEBUG (0)
- INFO (1)
- WARN (2)
- ERROR (3)
- FATAL (4)
Example:
If you choose WARN (2), you will be able to see WARN (2), ERROR (3) and FATAL (4)
but not DEBUG (0) and INFO (1).
Log categories
~~~~~~~~~~~~~~
Every category represents a specific area of the libcamera codebase,
the names can be located within the source code, for example:
`src/libcamera/camera_manager.cpp <https://git.libcamera.org/libcamera/libcamera.git/tree/src/libcamera/camera_manager.cpp#n35>`__
.. code:: cpp
LOG_DEFINE_CATEGORY(Camera)
There are two available macros used to assign a category name to a part of the
libcamera codebase:
LOG_DEFINE_CATEGORY
This macro is required, in order to use the ``LOGC`` macro for a particular
category. It can only be used once for each category. If you want to create
log messages within multiple compilation units for the same category utilize
the ``LOG_DECLARE_CATEGORY`` macro, in every file except the definition file.
LOG_DECLARE_CATEGORY
Used for sharing an already defined category between multiple separate
compilation units.
Both macros have to be used within the libcamera namespace of the C++ source
code.
IPA configuration
~~~~~~~~~~~~~~~~~
IPA modules use configuration files to store parameters. The format and
contents of the configuration files is specific to the IPA module. They usually
contain tuning parameters for the algorithms, in JSON format.
The ``LIBCAMERA_IPA_CONFIG_PATH`` variable can be used to specify custom
storage locations to search for those configuration files.
`Examples <https://git.libcamera.org/libcamera/libcamera.git/tree/src/ipa/rpi/vc4/data>`__
IPA module
~~~~~~~~~~
In order to locate the correct IPA module for your hardware, libcamera gathers
existing IPA modules from multiple locations. The default locations for this
operation are the installed system path (for example on Debian:
``/usr/local/x86_64-pc-linux-gnu/libcamera``) and the build directory.
With the ``LIBCAMERA_IPA_MODULE_PATH``, you can specify a non-default location
to search for IPA modules.

View File

@ -0,0 +1,5 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
.. Getting started information is defined in the project README file.
.. include:: ../README.rst
:start-after: .. section-begin-getting-started
:end-before: .. section-end-getting-started

View File

@ -0,0 +1,639 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
Using libcamera in a C++ application
====================================
This tutorial shows how to create a C++ application that uses libcamera to
interface with a camera on a system, capture frames from it for 3 seconds, and
write metadata about the frames to standard output.
Application skeleton
--------------------
Most of the code in this tutorial runs in the ``int main()`` function
with a separate global function to handle events. The two functions need
to share data, which are stored in global variables for simplicity. A
production-ready application would organize the various objects created
in classes, and the event handler would be a class member function to
provide context data without requiring global variables.
Use the following code snippets as the initial application skeleton.
It already lists all the necessary includes directives and instructs the
compiler to use the libcamera namespace, which gives access to the libcamera
defined names and types without the need of prefixing them.
.. code:: cpp
#include <iomanip>
#include <iostream>
#include <memory>
#include <thread>
#include <libcamera/libcamera.h>
using namespace libcamera;
using namespace std::chrono_literals;
int main()
{
// Code to follow
return 0;
}
Camera Manager
--------------
Every libcamera-based application needs an instance of a `CameraManager`_ that
runs for the life of the application. When the Camera Manager starts, it
enumerates all the cameras detected in the system. Behind the scenes, libcamera
abstracts and manages the complex pipelines that kernel drivers expose through
the `Linux Media Controller`_ and `Video for Linux`_ (V4L2) APIs, meaning that
an application doesn't need to handle device or driver specific details.
.. _CameraManager: https://libcamera.org/api-html/classlibcamera_1_1CameraManager.html
.. _Linux Media Controller: https://www.kernel.org/doc/html/latest/media/uapi/mediactl/media-controller-intro.html
.. _Video for Linux: https://www.linuxtv.org/docs.php
Before the ``int main()`` function, create a global shared pointer
variable for the camera to support the event call back later:
.. code:: cpp
static std::shared_ptr<Camera> camera;
Create a Camera Manager instance at the beginning of the main function, and then
start it. An application must only create a single Camera Manager instance.
The CameraManager can be stored in a unique_ptr to automate deleting the
instance when it is no longer used, but care must be taken to ensure all
cameras are released explicitly before this happens.
.. code:: cpp
std::unique_ptr<CameraManager> cm = std::make_unique<CameraManager>();
cm->start();
During the application initialization, the Camera Manager is started to
enumerate all the supported devices and create cameras that the application can
interact with.
Once the camera manager is started, we can use it to iterate the available
cameras in the system:
.. code:: cpp
for (auto const &camera : cm->cameras())
std::cout << camera->id() << std::endl;
Printing the camera id lists the machine-readable unique identifiers, so for
example, the output on a Linux machine with a connected USB webcam is
``\_SB_.PCI0.XHC_.RHUB.HS08-8:1.0-5986:2115``.
What libcamera considers a camera
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The libcamera library considers any unique source of video frames, which usually
correspond to a camera sensor, as a single camera device. Camera devices expose
streams, which are obtained by processing data from the single image source and
all share some basic properties such as the frame duration and the image
exposure time, as they only depend on the image source configuration.
Applications select one or multiple Camera devices they wish to operate on, and
require frames from at least one of their Streams.
Create and acquire a camera
---------------------------
This example application uses a single camera (the first enumerated one) that
the Camera Manager reports as available to applications.
Camera devices are stored by the CameraManager in a list accessible by index, or
can be retrieved by name through the ``CameraManager::get()`` function. The
code below retrieves the name of the first available camera and gets the camera
by name from the Camera Manager, after making sure that at least one camera is
available.
.. code:: cpp
auto cameras = cm->cameras();
if (cameras.empty()) {
std::cout << "No cameras were identified on the system."
<< std::endl;
cm->stop();
return EXIT_FAILURE;
}
std::string cameraId = cameras[0]->id();
auto camera = cm->get(cameraId);
/*
* Note that `camera` may not compare equal to `cameras[0]`.
* In fact, it might simply be a `nullptr`, as the particular
* device might have disappeared (and reappeared) in the meantime.
*/
Once a camera has been selected an application needs to acquire an exclusive
lock to it so no other application can use it.
.. code:: cpp
camera->acquire();
Configure the camera
--------------------
Before the application can do anything with the camera, it needs to configure
the image format and sizes of the streams it wants to capture frames from.
Stream configurations are represented by instances of the
``StreamConfiguration`` class, which are grouped together in a
``CameraConfiguration`` object. Before an application can start setting its
desired configuration, a ``CameraConfiguration`` instance needs to be generated
from the ``Camera`` device using the ``Camera::generateConfiguration()``
function.
The libcamera library uses the ``StreamRole`` enumeration to define predefined
ways an application intends to use a camera. The
``Camera::generateConfiguration()`` function accepts a list of desired roles and
generates a ``CameraConfiguration`` with the best stream parameters
configuration for each of the requested roles. If the camera can handle the
requested roles, it returns an initialized ``CameraConfiguration`` and a null
pointer if it can't.
It is possible for applications to generate an empty ``CameraConfiguration``
instance by not providing any role. The desired configuration will have to be
filled-in manually and manually validated.
In the example application, create a new configuration variable and use the
``Camera::generateConfiguration`` function to produce a ``CameraConfiguration``
for the single ``StreamRole::Viewfinder`` role.
.. code:: cpp
std::unique_ptr<CameraConfiguration> config = camera->generateConfiguration( { StreamRole::Viewfinder } );
The generated ``CameraConfiguration`` has a ``StreamConfiguration`` instance for
each ``StreamRole`` the application requested. Each of these has a default size
and format that the camera assigned, and a list of supported pixel formats and
sizes.
The code below accesses the first and only ``StreamConfiguration`` item in the
``CameraConfiguration`` and outputs its parameters to standard output.
.. code:: cpp
StreamConfiguration &streamConfig = config->at(0);
std::cout << "Default viewfinder configuration is: " << streamConfig.toString() << std::endl;
This is expected to output something like:
``Default viewfinder configuration is: 1280x720-MJPEG``
Change and validate the configuration
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
With an initialized ``CameraConfiguration``, an application can make changes to
the parameters it contains, for example, to change the width and height, use the
following code:
.. code:: cpp
streamConfig.size.width = 640;
streamConfig.size.height = 480;
If an application changes any parameters, it must validate the configuration
before applying it to the camera using the ``CameraConfiguration::validate()``
function. If the new values are not supported by the ``Camera`` device, the
validation process adjusts the parameters to what it considers to be the closest
supported values.
The ``validate`` function returns a `Status`_ which applications shall check to
see if the Pipeline Handler adjusted the configuration.
.. _Status: https://libcamera.org/api-html/classlibcamera_1_1CameraConfiguration.html#a64163f21db2fe1ce0a6af5a6f6847744
For example, the code above set the width and height to 640x480, but if the
camera cannot produce an image that large, it might adjust the configuration to
the supported size of 320x240 and return ``Adjusted`` as validation status
result.
If the configuration to validate cannot be adjusted to a set of supported
values, the validation procedure fails and returns the ``Invalid`` status.
For this example application, the code below prints the adjusted values to
standard out.
.. code:: cpp
config->validate();
std::cout << "Validated viewfinder configuration is: " << streamConfig.toString() << std::endl;
For example, the output might be something like
``Validated viewfinder configuration is: 320x240-MJPEG``
A validated ``CameraConfiguration`` can be given to the ``Camera`` device to be
applied to the system.
.. code:: cpp
camera->configure(config.get());
If an application doesn't first validate the configuration before calling
``Camera::configure()``, there's a chance that calling the function can fail, if
the given configuration would have to be adjusted.
Allocate FrameBuffers
---------------------
An application needs to reserve the memory that libcamera can write incoming
frames and data to, and that the application can then read. The libcamera
library uses ``FrameBuffer`` instances to represent memory buffers allocated in
memory. An application should reserve enough memory for the frame size the
streams need based on the configured image sizes and formats.
The libcamera library consumes buffers provided by applications as
``FrameBuffer`` instances, which makes libcamera a consumer of buffers exported
by other devices (such as displays or video encoders), or allocated from an
external allocator (such as ION on Android).
In some situations, applications do not have any means to allocate or get hold
of suitable buffers, for instance, when no other device is involved, or on Linux
platforms that lack a centralized allocator. The ``FrameBufferAllocator`` class
provides a buffer allocator an application can use in these situations.
An application doesn't have to use the default ``FrameBufferAllocator`` that
libcamera provides. It can instead allocate memory manually and pass the buffers
in ``Request``\s (read more about ``Request`` in `the frame capture section
<#frame-capture>`_ of this guide). The example in this guide covers using the
``FrameBufferAllocator`` that libcamera provides.
Using the libcamera ``FrameBufferAllocator``
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Applications create a ``FrameBufferAllocator`` for a Camera and use it
to allocate buffers for streams of a ``CameraConfiguration`` with the
``allocate()`` function.
The list of allocated buffers can be retrieved using the ``Stream`` instance
as the parameter of the ``FrameBufferAllocator::buffers()`` function.
.. code:: cpp
FrameBufferAllocator *allocator = new FrameBufferAllocator(camera);
for (StreamConfiguration &cfg : *config) {
int ret = allocator->allocate(cfg.stream());
if (ret < 0) {
std::cerr << "Can't allocate buffers" << std::endl;
return -ENOMEM;
}
size_t allocated = allocator->buffers(cfg.stream()).size();
std::cout << "Allocated " << allocated << " buffers for stream" << std::endl;
}
Frame Capture
~~~~~~~~~~~~~
The libcamera library implements a streaming model based on per-frame requests.
For each frame an application wants to capture it must queue a request for it to
the camera. With libcamera, a ``Request`` is at least one ``Stream`` associated
with a ``FrameBuffer`` representing the memory location where frames have to be
stored.
First, by using the ``Stream`` instance associated to each
``StreamConfiguration``, retrieve the list of ``FrameBuffer``\s created for it
using the frame allocator. Then create a vector of requests to be submitted to
the camera.
.. code:: cpp
Stream *stream = streamConfig.stream();
const std::vector<std::unique_ptr<FrameBuffer>> &buffers = allocator->buffers(stream);
std::vector<std::unique_ptr<Request>> requests;
Proceed to fill the request vector by creating ``Request`` instances from the
camera device, and associate a buffer for each of them for the ``Stream``.
.. code:: cpp
for (unsigned int i = 0; i < buffers.size(); ++i) {
std::unique_ptr<Request> request = camera->createRequest();
if (!request)
{
std::cerr << "Can't create request" << std::endl;
return -ENOMEM;
}
const std::unique_ptr<FrameBuffer> &buffer = buffers[i];
int ret = request->addBuffer(stream, buffer.get());
if (ret < 0)
{
std::cerr << "Can't set buffer for request"
<< std::endl;
return ret;
}
requests.push_back(std::move(request));
}
.. TODO: Controls
.. TODO: A request can also have controls or parameters that you can apply to the image.
Event handling and callbacks
----------------------------
The libcamera library uses the concept of `signals and slots` (similar to `Qt
Signals and Slots`_) to connect events with callbacks to handle them.
.. _signals and slots: https://libcamera.org/api-html/classlibcamera_1_1Signal.html#details
.. _Qt Signals and Slots: https://doc.qt.io/qt-6/signalsandslots.html
The ``Camera`` device emits two signals that applications can connect to in
order to execute callbacks on frame completion events.
The ``Camera::bufferCompleted`` signal notifies applications that a buffer with
image data is available. Receiving notifications about the single buffer
completion event allows applications to implement partial request completion
support, and to inspect the buffer content before the request it is part of has
fully completed.
The ``Camera::requestCompleted`` signal notifies applications that a request
has completed, which means all the buffers the request contains have now
completed. Request completion notifications are always emitted in the same order
as the requests have been queued to the camera.
To receive the signals emission notifications, connect a slot function to the
signal to handle it in the application code.
.. code:: cpp
camera->requestCompleted.connect(requestComplete);
For this example application, only the ``Camera::requestCompleted`` signal gets
handled and the matching ``requestComplete`` slot function outputs information
about the FrameBuffer to standard output. This callback is typically where an
application accesses the image data from the camera and does something with it.
Signals operate in the libcamera ``CameraManager`` thread context, so it is
important not to block the thread for a long time, as this blocks internal
processing of the camera pipelines, and can affect realtime performance.
Handle request completion events
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Create the ``requestComplete`` function by matching the slot signature:
.. code:: cpp
static void requestComplete(Request *request)
{
// Code to follow
}
Request completion events can be emitted for requests which have been canceled,
for example, by unexpected application shutdown. To avoid an application
processing invalid image data, it's worth checking that the request has
completed successfully. The list of request completion statuses is available in
the `Request::Status`_ class enum documentation.
.. _Request::Status: https://www.libcamera.org/api-html/classlibcamera_1_1Request.html#a2209ba8d51af8167b25f6e3e94d5c45b
.. code:: cpp
if (request->status() == Request::RequestCancelled)
return;
If the ``Request`` has completed successfully, applications can access the
completed buffers using the ``Request::buffers()`` function, which returns a map
of ``FrameBuffer`` instances associated with the ``Stream`` that produced the
images.
.. code:: cpp
const std::map<const Stream *, FrameBuffer *> &buffers = request->buffers();
Iterating through the map allows applications to inspect each completed buffer
in this request, and access the metadata associated to each frame.
The metadata buffer contains information such as the capture status, a timestamp,
and the bytes used, as described in the `FrameMetadata`_ documentation.
.. _FrameMetadata: https://libcamera.org/api-html/structlibcamera_1_1FrameMetadata.html
.. code:: cpp
for (auto bufferPair : buffers) {
FrameBuffer *buffer = bufferPair.second;
const FrameMetadata &metadata = buffer->metadata();
}
For this example application, inside the ``for`` loop from above, we can print
the Frame sequence number and details of the planes.
.. code:: cpp
std::cout << " seq: " << std::setw(6) << std::setfill('0') << metadata.sequence << " bytesused: ";
unsigned int nplane = 0;
for (const FrameMetadata::Plane &plane : metadata.planes())
{
std::cout << plane.bytesused;
if (++nplane < metadata.planes().size()) std::cout << "/";
}
std::cout << std::endl;
The expected output shows each monotonically increasing frame sequence number
and the bytes used by planes.
.. code:: text
seq: 000000 bytesused: 1843200
seq: 000002 bytesused: 1843200
seq: 000004 bytesused: 1843200
seq: 000006 bytesused: 1843200
seq: 000008 bytesused: 1843200
seq: 000010 bytesused: 1843200
seq: 000012 bytesused: 1843200
seq: 000014 bytesused: 1843200
seq: 000016 bytesused: 1843200
seq: 000018 bytesused: 1843200
seq: 000020 bytesused: 1843200
seq: 000022 bytesused: 1843200
seq: 000024 bytesused: 1843200
seq: 000026 bytesused: 1843200
seq: 000028 bytesused: 1843200
seq: 000030 bytesused: 1843200
seq: 000032 bytesused: 1843200
seq: 000034 bytesused: 1843200
seq: 000036 bytesused: 1843200
seq: 000038 bytesused: 1843200
seq: 000040 bytesused: 1843200
seq: 000042 bytesused: 1843200
A completed buffer contains of course image data which can be accessed through
the per-plane dma-buf file descriptor transported by the ``FrameBuffer``
instance. An example of how to write image data to disk is available in the
`FileSink class`_ which is a part of the ``cam`` utility application in the
libcamera repository.
.. _FileSink class: https://git.libcamera.org/libcamera/libcamera.git/tree/src/cam/file_sink.cpp
With the handling of this request completed, it is possible to re-use the
request and the associated buffers and re-queue it to the camera
device:
.. code:: cpp
request->reuse(Request::ReuseBuffers);
camera->queueRequest(request);
Request queueing
----------------
The ``Camera`` device is now ready to receive frame capture requests and
actually start delivering frames. In order to prepare for that, an application
needs to first start the camera, and queue requests to it for them to be
processed.
In the main() function, just after having connected the
``Camera::requestCompleted`` signal to the callback handler, start the camera
and queue all the previously created requests.
.. code:: cpp
camera->start();
for (std::unique_ptr<Request> &request : requests)
camera->queueRequest(request.get());
Event processing
~~~~~~~~~~~~~~~~
libcamera creates an internal execution thread at `CameraManager::start()`_
time to decouple its own event processing from the application's main thread.
Applications are thus free to manage their own execution opportunely, and only
need to respond to events generated by libcamera emitted through signals.
.. _CameraManager::start(): https://libcamera.org/api-html/classlibcamera_1_1CameraManager.html#a49e322880a2a26013bb0076788b298c5
Real-world applications will likely either integrate with the event loop of the
framework they use, or create their own event loop to respond to user events.
For the simple application presented in this example, it is enough to prevent
immediate termination by pausing for 3 seconds. During that time, the libcamera
thread will generate request completion events that the application will handle
in the ``requestComplete()`` slot connected to the ``Camera::requestCompleted``
signal.
.. code:: cpp
std::this_thread::sleep_for(3000ms);
Clean up and stop the application
---------------------------------
The application is now finished with the camera and the resources the camera
uses, so needs to do the following:
- stop the camera
- free the buffers in the FrameBufferAllocator and delete it
- release the lock on the camera and reset the pointer to it
- stop the camera manager
.. code:: cpp
camera->stop();
allocator->free(stream);
delete allocator;
camera->release();
camera.reset();
cm->stop();
return 0;
In this instance the CameraManager will automatically be deleted by the
unique_ptr implementation when it goes out of scope.
Build and run instructions
--------------------------
To build the application, we recommend that you use the `Meson build system`_
which is also the official build system of the libcamera library.
Make sure both ``meson`` and ``libcamera`` are installed in your system. Please
refer to your distribution documentation to install meson and install the most
recent version of libcamera from the `git repository`_. You would also need to
install the ``pkg-config`` tool to correctly identify the libcamera.so object
install location in the system.
.. _Meson build system: https://mesonbuild.com/
.. _git repository: https://git.libcamera.org/libcamera/libcamera.git/
Dependencies
~~~~~~~~~~~~
The test application presented here depends on the libcamera library to be
available in a path that meson can identify. The libcamera install procedure
performed using the ``ninja install`` command may by default deploy the
libcamera components in the ``/usr/local/lib`` path, or a package manager may
install it to ``/usr/lib`` depending on your distribution. If meson is unable to
find the location of the libcamera installation, you may need to instruct meson
to look into a specific path when searching for ``libcamera.so`` by setting the
``PKG_CONFIG_PATH`` environment variable to the right location.
Adjust the following command to use the ``pkgconfig`` directory where libcamera
has been installed in your system.
.. code:: shell
export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig/
Verify that ``pkg-config`` can identify the ``libcamera`` library with
.. code:: shell
$ pkg-config --libs --cflags libcamera
-I/usr/local/include/libcamera -L/usr/local/lib -lcamera -lcamera-base
``meson`` can alternatively use ``cmake`` to locate packages, please refer to
the ``meson`` documentation if you prefer to use it in place of ``pkgconfig``
Build file
~~~~~~~~~~
With the dependencies correctly identified, prepare a ``meson.build`` build file
to be placed in the same directory where the application lives. You can
name your application as you like, but be sure to update the following snippet
accordingly. In this example, the application file has been named
``simple-cam.cpp``.
.. code::
project('simple-cam', 'cpp')
simple_cam = executable('simple-cam',
'simple-cam.cpp',
dependencies: dependency('libcamera', required : true))
The ``dependencies`` line instructs meson to ask ``pkgconfig`` (or ``cmake``) to
locate the ``libcamera`` library, which the test application will be
dynamically linked against.
With the build file in place, compile and run the application with:
.. code:: shell
$ meson build
$ cd build
$ ninja
$ ./simple-cam
It is possible to increase the library debug output by using environment
variables which control the library log filtering system:
.. code:: shell
$ LIBCAMERA_LOG_LEVELS=0 ./simple-cam

View File

@ -0,0 +1,319 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
Developers guide to libcamera
=============================
The Linux kernel handles multimedia devices through the 'Linux media' subsystem
and provides a set of APIs (application programming interfaces) known
collectively as V4L2 (`Video for Linux 2`_) and the `Media Controller`_ API
which provide an interface to interact and control media devices.
Included in this subsystem are drivers for camera sensors, CSI2 (Camera
Serial Interface) receivers, and ISPs (Image Signal Processors).
The usage of these drivers to provide a functioning camera stack is a
responsibility that lies in userspace which is commonly implemented separately
by vendors without a common architecture or API for application developers.
libcamera provides a complete camera stack for Linux based systems to abstract
functionality desired by camera application developers and process the
configuration of hardware and image control algorithms required to obtain
desirable results from the camera.
.. _Video for Linux 2: https://www.linuxtv.org/downloads/v4l-dvb-apis-new/userspace-api/v4l/v4l2.html
.. _Media Controller: https://www.linuxtv.org/downloads/v4l-dvb-apis-new/userspace-api/mediactl/media-controller.html
In this developers guide, we will explore the `Camera Stack`_ and how it
can be visualised at a high level, and explore the internal `Architecture`_ of
the libcamera library with its components. The current `Platform Support`_ is
detailed, as well as an overview of the `Licensing`_ requirements of the
project.
This introduction is followed by a walkthrough tutorial to newcomers wishing to
support a new platform with the `Pipeline Handler Writers Guide`_ and for those
looking to make use of the libcamera native API an `Application Writers Guide`_
provides a tutorial of the key APIs exposed by libcamera.
.. _Pipeline Handler Writers Guide: pipeline-handler.html
.. _Application Writers Guide: application-developer.html
.. TODO: Correctly link to the other articles of the guide
Camera Stack
------------
The libcamera library is implemented in userspace, and makes use of underlying
kernel drivers that directly interact with hardware.
Applications can make use of libcamera through the native `libcamera API`_ or
through an adaptation layer integrating libcamera into a larger framework.
.. _libcamera API: https://www.libcamera.org/api-html/index.html
::
Application Layer
/ +--------------+ +--------------+ +--------------+ +--------------+
| | Native | | Framework | | Native | | Android |
| | V4L2 | | Application | | libcamera | | Camera |
| | Application | | (gstreamer) | | Application | | Framework |
\ +--------------+ +--------------+ +--------------+ +--------------+
^ ^ ^ ^
| | | |
| | | |
v v | v
Adaptation Layer |
/ +--------------+ +--------------+ | +--------------+
| | V4L2 | | gstreamer | | | Android |
| | Compatibility| | element | | | Camera |
| | (preload) | |(libcamerasrc)| | | HAL |
\ +--------------+ +--------------+ | +--------------+
|
^ ^ | ^
| | | |
| | | |
v v v v
libcamera Framework
/ +--------------------------------------------------------------------+
| | |
| | libcamera |
| | |
\ +--------------------------------------------------------------------+
^ ^ ^
Userspace | | |
--------------------- | ---------------- | ---------------- | ---------------
Kernel | | |
v v v
+-----------+ +-----------+ +-----------+
| Media | <--> | Video | <--> | V4L2 |
| Device | | Device | | Subdev |
+-----------+ +-----------+ +-----------+
The camera stack comprises four software layers. From bottom to top:
* The kernel drivers control the camera hardware and expose a low-level
interface to userspace through the Linux kernel V4L2 family of APIs
(Media Controller API, V4L2 Video Device API and V4L2 Subdev API).
* The libcamera framework is the core part of the stack. It handles all control
of the camera devices in its core component, libcamera, and exposes a native
C++ API to upper layers.
* The libcamera adaptation layer is an umbrella term designating the components
that interface to libcamera in other frameworks. Notable examples are the V4L2
compatibility layer, the gstreamer libcamera element, and the Android camera
HAL implementation based on libcamera which are provided as a part of the
libcamera project.
* The applications and upper level frameworks are based on the libcamera
framework or libcamera adaptation, and are outside of the scope of the
libcamera project, however example native applications (cam, qcam) are
provided for testing.
V4L2 Compatibility Layer
V4L2 compatibility is achieved through a shared library that traps all
accesses to camera devices and routes them to libcamera to emulate high-level
V4L2 camera devices. It is injected in a process address space through
``LD_PRELOAD`` and is completely transparent for applications.
The compatibility layer exposes camera device features on a best-effort basis,
and aims for the level of features traditionally available from a UVC camera
designed for video conferencing.
Android Camera HAL
Camera support for Android is achieved through a generic Android camera HAL
implementation on top of libcamera. The HAL implements features required by
Android and out of scope from libcamera, such as JPEG encoding support.
This component is used to provide support for ChromeOS platforms.
GStreamer element (gstlibcamerasrc)
A `GStreamer element`_ is provided to allow capture from libcamera supported
devices through GStreamer pipelines, and connect to other elements for further
processing.
Development of this element is ongoing and is limited to a single stream.
Native libcamera API
Applications can make use of the libcamera API directly using the C++
API. An example application and walkthrough using the libcamera API can be
followed in the `Application Writers Guide`_
.. _GStreamer element: https://gstreamer.freedesktop.org/documentation/application-development/basics/elements.html
Architecture
------------
While offering a unified API towards upper layers, and presenting itself as a
single library, libcamera isn't monolithic. It exposes multiple components
through its public API and is built around a set of separate helpers internally.
Hardware abstractions are handled through the use of device-specific components
where required and dynamically loadable plugins are used to separate image
processing algorithms from the core libcamera codebase.
::
--------------------------< libcamera Public API >---------------------------
^ ^
| |
v v
+-------------+ +---------------------------------------------------+
| Camera | | Camera Device |
| Manager | | +-----------------------------------------------+ |
+-------------+ | | Device-Agnostic | |
^ | | | |
| | | +--------------------------+ |
| | | | ~~~~~~~~~~~~~~~~~~~~~~~ |
| | | | { +-----------------+ } |
| | | | } | //// Image //// | { |
| | | | <-> | / Processing // | } |
| | | | } | / Algorithms // | { |
| | | | { +-----------------+ } |
| | | | ~~~~~~~~~~~~~~~~~~~~~~~ |
| | | | ========================== |
| | | | +-----------------+ |
| | | | | // Pipeline /// | |
| | | | <-> | /// Handler /// | |
| | | | | /////////////// | |
| | +--------------------+ +-----------------+ |
| | Device-Specific |
| +---------------------------------------------------+
| ^ ^
| | |
v v v
+--------------------------------------------------------------------+
| Helpers and Support Classes |
| +-------------+ +-------------+ +-------------+ +-------------+ |
| | MC & V4L2 | | Buffers | | Sandboxing | | Plugins | |
| | Support | | Allocator | | IPC | | Manager | |
| +-------------+ +-------------+ +-------------+ +-------------+ |
| +-------------+ +-------------+ |
| | Pipeline | | ... | |
| | Runner | | | |
| +-------------+ +-------------+ |
+--------------------------------------------------------------------+
/// Device-Specific Components
~~~ Sandboxing
Camera Manager
The Camera Manager enumerates cameras and instantiates Pipeline Handlers to
manage each Camera that libcamera supports. The Camera Manager supports
hotplug detection and notification events when supported by the underlying
kernel devices.
There is only ever one instance of the Camera Manager running per application.
Each application's instance of the Camera Manager ensures that only a single
application can take control of a camera device at once.
Read the `Camera Manager API`_ documentation for more details.
.. _Camera Manager API: https://libcamera.org/api-html/classlibcamera_1_1CameraManager.html
Camera Device
The Camera class represents a single item of camera hardware that is capable
of producing one or more image streams, and provides the API to interact with
the underlying device.
If a system has multiple instances of the same hardware attached, each has its
own instance of the camera class.
The API exposes full control of the device to upper layers of libcamera through
the public API, making it the highest level object libcamera exposes, and the
object that all other API operations interact with from configuration to
capture.
Read the `Camera API`_ documentation for more details.
.. _Camera API: https://libcamera.org/api-html/classlibcamera_1_1Camera.html
Pipeline Handler
The Pipeline Handler manages the complex pipelines exposed by the kernel
drivers through the Media Controller and V4L2 APIs. It abstracts pipeline
handling to hide device-specific details from the rest of the library, and
implements both pipeline configuration based on stream configuration, and
pipeline runtime execution and scheduling when needed by the device.
The Pipeline Handler lives in the same process as the rest of the library, and
has access to all helpers and kernel camera-related devices.
Hardware abstraction is handled by device specific Pipeline Handlers which are
derived from the Pipeline Handler base class allowing commonality to be shared
among the implementations.
Derived pipeline handlers create Camera device instances based on the devices
they detect and support on the running system, and are responsible for
managing the interactions with a camera device.
More details can be found in the `PipelineHandler API`_ documentation, and the
`Pipeline Handler Writers Guide`_.
.. _PipelineHandler API: https://libcamera.org/api-html/classlibcamera_1_1PipelineHandler.html
Image Processing Algorithms
An image processing algorithm (IPA) component is a loadable plugin that
implements 3A (Auto-Exposure, Auto-White Balance, and Auto-Focus) and other
algorithms.
The algorithms run on the CPU and interact with the camera devices through the
Pipeline Handler to control hardware image processing based on the parameters
supplied by upper layers, maintaining state and closing the control loop
of the ISP.
The component is sandboxed and can only interact with libcamera through the
API provided by the Pipeline Handler and an IPA has no direct access to kernel
camera devices.
Open source IPA modules built with libcamera can be run in the same process
space as libcamera, however external IPA modules are run in a separate process
from the main libcamera process. IPA modules have a restricted view of the
system, including no access to networking APIs and limited access to file
systems.
IPA modules are only required for platforms and devices with an ISP controlled
by the host CPU. Camera sensors which have an integrated ISP are not
controlled through the IPA module.
Platform Support
----------------
The library currently supports the following hardware platforms specifically
with dedicated pipeline handlers:
- Intel IPU3 (ipu3)
- Rockchip RK3399 (rkisp1)
- RaspberryPi 3 and 4 (rpi/vc4)
Furthermore, generic platform support is provided for the following:
- USB video device class cameras (uvcvideo)
- iMX7, Allwinner Sun6i (simple)
- Virtual media controller driver for test use cases (vimc)
Licensing
---------
The libcamera core is covered by the `LGPL-2.1-or-later`_ license. Pipeline
Handlers are a part of the libcamera code base and need to be contributed
upstream by device vendors. IPA modules included in libcamera are covered by a
free software license, however third-parties may develop IPA modules outside of
libcamera and distribute them under a closed-source license, provided they do
not include source code from the libcamera project.
The libcamera project itself contains multiple libraries, applications and
utilities. Licenses are expressed through SPDX tags in text-based files that
support comments, and through the .reuse/dep5 file otherwise. A copy of all
licenses are stored in the LICENSES directory, and a full summary of the
licensing used throughout the project can be found in the COPYING.rst document.
Applications which link dynamically against libcamera and use only the public
API are an independent work of the authors and have no license restrictions
imposed upon them from libcamera.
.. _LGPL-2.1-or-later: https://spdx.org/licenses/LGPL-2.1-or-later.html

View File

@ -0,0 +1,531 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
IPA Writer's Guide
==================
IPA modules are Image Processing Algorithm modules. They provide functionality
that the pipeline handler can use for image processing.
This guide covers the definition of the IPA interface, and how to plumb the
connection between the pipeline handler and the IPA.
The IPA interface and protocol
------------------------------
The IPA interface defines the interface between the pipeline handler and the
IPA. Specifically, it defines the functions that the IPA exposes that the
pipeline handler can call, and the signals that the pipeline handler can
connect to, in order to receive data from the IPA asynchronously. In addition,
it contains any custom data structures that the pipeline handler and IPA may
pass to each other.
It is possible to use the same IPA interface with multiple pipeline handlers
on different hardware platforms. Generally in such cases, these platforms would
have a common hardware ISP pipeline. For instance, the rkisp1 pipeline handler
supports both the RK3399 and the i.MX8MP as they integrate the same ISP.
However, the i.MX8MP has a more complex camera pipeline, which may call for a
dedicated pipeline handler in the future. As the ISP is the same as for RK3399,
the same IPA interface could be used for both pipeline handlers. The build files
provide a mapping from pipeline handler to the IPA interface name as detailed in
:ref:`compiling-section`.
The IPA protocol refers to the agreement between the pipeline handler and the
IPA regarding the expected response(s) from the IPA for given calls to the IPA.
This protocol doesn't need to be declared anywhere in code, but it shall be
documented, as there may be multiple IPA implementations for one pipeline
handler.
As part of the design of libcamera, IPAs may be isolated in a separate process,
or run in the same process but a different thread from libcamera. The pipeline
handler and IPA shall not have to change their operation based on whether the
IPA is isolated or not, but the possibility of isolation needs to be kept in
mind. Therefore all data that is passed between them must be serializable, so
they must be defined separately in the `mojo Interface Definition Language`_
(IDL), and a code generator will generate headers and serializers corresponding
to the definitions. Every interface is defined in a mojom file and includes:
- the functions that the pipeline handler can call from the IPA
- signals in the pipeline handler that the IPA can emit
- any data structures that are to be passed between the pipeline handler and the IPA
All IPA modules of a given pipeline handler use the same IPA interface. The IPA
interface definition is thus written by the pipeline handler author, based on
how they design the interactions between the pipeline handler and the IPA.
The entire IPA interface, including the functions, signals, and any custom
structs shall be defined in a file named {interface_name}.mojom under
include/libcamera/ipa/.
.. _mojo Interface Definition Language: https://chromium.googlesource.com/chromium/src.git/+/master/mojo/public/tools/bindings/README.md
Namespacing
-----------
To avoid name collisions between data types defined by different IPA interfaces
and data types defined by libcamera, each IPA interface must be defined in its
own namespace.
The namespace is specified with mojo's module directive. It must be the first
non-comment line in the mojo data definition file. For example, the Raspberry
Pi IPA interface uses:
.. code-block:: none
module ipa.rpi;
This will become the ipa::rpi namespace in C++ code.
Data containers
---------------
Since the data passed between the pipeline handler and the IPA must support
serialization, any custom data containers must be defined with the mojo IDL.
The following list of libcamera objects are supported in the interface
definition, and may be used as function parameter types or struct field types:
- libcamera.ControlInfoMap
- libcamera.ControlList
- libcamera.FileDescriptor
- libcamera.IPABuffer
- libcamera.IPACameraSensorInfo
- libcamera.IPASettings
- libcamera.IPAStream
- libcamera.Point
- libcamera.Rectangle
- libcamera.Size
- libcamera.SizeRange
To use them, core.mojom must be included in the mojo data definition file:
.. code-block:: none
import "include/libcamera/ipa/core.mojom";
Other custom structs may be defined and used as well. There is no requirement
that they must be defined before usage. enums and structs are supported.
The following is an example of a definition of an enum, for the purpose of
being used as flags:
.. code-block:: none
enum ConfigParameters {
ConfigLsTable = 0x01,
ConfigStaggeredWrite = 0x02,
ConfigSensor = 0x04,
ConfigDropFrames = 0x08,
};
The following is an example of a definition of a struct:
.. code-block:: none
struct ConfigInput {
uint32 op;
uint32 transform;
libcamera.FileDescriptor lsTableHandle;
int32 lsTableHandleStatic = -1;
map<uint32, libcamera.IPAStream> streamConfig;
array<libcamera.IPABuffer> buffers;
};
This example has some special things about it. First of all, it uses the
FileDescriptor data type. This type must be used to ensure that the file
descriptor that it contains is translated properly across the IPC boundary
(when the IPA is in an isolated process).
This does mean that if the file descriptor should be sent without being
translated (for example, for the IPA to tell the pipeline handler which
fd *that the pipeline handler holds* to act on), then it must be in a
regular int32 type.
This example also illustrates that struct fields may have default values, as
is assigned to lsTableHandleStatic. This is the value that the field will
take when the struct is constructed with the default constructor.
Arrays and maps are supported as well. They are translated to C++ vectors and
maps, respectively. The members of the arrays and maps are embedded, and cannot
be const.
Note that nullable fields, static-length arrays, handles, and unions, which
are supported by mojo, are not supported by our code generator.
The Main IPA interface
----------------------
The IPA interface is split in two parts, the Main IPA interface, which
describes the functions that the pipeline handler can call from the IPA,
and the Event IPA interface, which describes the signals received by the
pipeline handler that the IPA can emit. Both must be defined. This section
focuses on the Main IPA interface.
The main interface must be named as IPA{interface_name}Interface.
The functions that the pipeline handler can call from the IPA may be
synchronous or asynchronous. Synchronous functions do not return until the IPA
returns from the function, while asynchronous functions return immediately
without waiting for the IPA to return.
At a minimum, the following three functions must be present (and implemented):
- init();
- start();
- stop();
All three of these functions are synchronous. The parameters for start() and
init() may be customized.
init() initializes the IPA interface. It shall be called before any other
function of the IPAInterface.
stop() informs the IPA module that the camera is stopped. The IPA module shall
release resources prepared in start().
A configure() function is recommended. Any ControlInfoMap instances that will be
used by the IPA must be sent to the IPA from the pipeline handler, at configure
time, for example.
All input parameters will become const references, except for arithmetic types,
which will be passed by value. Output parameters will become pointers, unless
the first output parameter is an int32, or there is only one primitive output
parameter, in which case it will become a regular return value.
const is not allowed inside of arrays and maps. mojo arrays will become C++
std::vector<>.
By default, all functions defined in the main interface are synchronous. This
means that in the case of IPC (i.e. isolated IPA), the function call will not
return until the return value or output parameters are ready. To specify an
asynchronous function, the [async] attribute can be used. Asynchronous
functions must not have any return value or output parameters, since in the
case of IPC the call needs to return immediately.
It is also possible that the IPA will not be run in isolation. In this case,
the IPA thread will not exist until start() is called. This means that in the
case of no isolation, asynchronous calls cannot be made before start(). Since
the IPA interface must be the same regardless of isolation, the same
restriction applies to the case of isolation, and any function that will be
called before start() must be synchronous.
In addition, any call made after start() and before stop() must be
asynchronous. The motivation for this is to avoid damaging real-time
performance of the pipeline handler. If the pipeline handler wants some data
from the IPA, the IPA should return the data asynchronously via an event
(see "The Event IPA interface").
The following is an example of a main interface definition:
.. code-block:: none
interface IPARPiInterface {
init(libcamera.IPASettings settings, string sensorName)
=> (int32 ret, bool metadataSupport);
start() => (int32 ret);
stop();
configure(libcamera.IPACameraSensorInfo sensorInfo,
map<uint32, libcamera.IPAStream> streamConfig,
map<uint32, libcamera.ControlInfoMap> entityControls,
ConfigInput ipaConfig)
=> (int32 ret, ConfigOutput results);
mapBuffers(array<IPABuffer> buffers);
unmapBuffers(array<uint32> ids);
[async] signalStatReady(uint32 bufferId);
[async] signalQueueRequest(libcamera.ControlList controls);
[async] signalIspPrepare(ISPConfig data);
};
The first three functions are the required functions. Functions do not need to
have return values, like stop(), mapBuffers(), and unmapBuffers(). In the case
of asynchronous functions, as explained before, they *must not* have return
values.
The Event IPA interface
-----------------------
The event IPA interface describes the signals received by the pipeline handler
that the IPA can emit. It must be defined. If there are no event functions,
then it may be empty. These emissions are meant to notify the pipeline handler
of some event, such as request data is ready, and *must not* be used to drive
the camera pipeline from the IPA.
The event interface must be named as IPA{interface_name}EventInterface.
Functions defined in the event interface are implicitly asynchronous.
Thus they cannot return any value. Specifying the [async] tag is not
necessary.
Functions defined in the event interface will become signals in the IPA
interface. The IPA can emit signals, while the pipeline handler can connect
slots to them.
The following is an example of an event interface definition:
.. code-block:: none
interface IPARPiEventInterface {
statsMetadataComplete(uint32 bufferId,
libcamera.ControlList controls);
runIsp(uint32 bufferId);
embeddedComplete(uint32 bufferId);
setIsp(libcamera.ControlList controls);
setStaggered(libcamera.ControlList controls);
};
.. _compiling-section:
Compiling the IPA interface
---------------------------
After the IPA interface is defined in include/libcamera/ipa/{interface_name}.mojom,
an entry for it must be added in meson so that it can be compiled. The filename
must be added to the pipeline_ipa_mojom_mapping variable in
include/libcamera/ipa/meson.build. This variable maps the pipeline handler name
to its IPA interface file.
For example, adding the raspberrypi.mojom file to meson:
.. code-block:: none
pipeline_ipa_mojom_mapping = [
'rpi/vc4': 'raspberrypi.mojom',
]
This will cause the mojo data definition file to be compiled. Specifically, it
generates five files:
- a header describing the custom data structures, and the complete IPA
interface (at {$build_dir}/include/libcamera/ipa/{interface}_ipa_interface.h)
- a serializer implementing de/serialization for the custom data structures (at
{$build_dir}/include/libcamera/ipa/{interface}_ipa_serializer.h)
- a proxy header describing a specialized IPA proxy (at
{$build_dir}/include/libcamera/ipa/{interface}_ipa_proxy.h)
- a proxy source implementing the IPA proxy (at
{$build_dir}/src/libcamera/proxy/{interface}_ipa_proxy.cpp)
- a proxy worker source implementing the other end of the IPA proxy (at
{$build_dir}/src/libcamera/proxy/worker/{interface}_ipa_proxy_worker.cpp)
The IPA proxy serves as the layer between the pipeline handler and the IPA, and
handles threading vs isolation transparently. The pipeline handler and the IPA
only require the interface header and the proxy header. The serializer is only
used internally by the proxy.
Using the custom data structures
--------------------------------
To use the custom data structures that are defined in the mojo data definition
file, the following header must be included:
.. code-block:: C++
#include <libcamera/ipa/{interface_name}_ipa_interface.h>
The POD types of the structs simply become their C++ counterparts, eg. uint32
in mojo will become uint32_t in C++. mojo map becomes C++ std::map, and mojo
array becomes C++ std::vector. All members of maps and vectors are embedded,
and are not pointers. The members cannot be const.
The names of all the fields of structs can be used in C++ in exactly the same
way as they are defined in the data definition file. For example, the following
struct as defined in the mojo file:
.. code-block:: none
struct SensorConfig {
uint32 gainDelay = 1;
uint32 exposureDelay;
uint32 sensorMetadata;
};
Will become this in C++:
.. code-block:: C++
struct SensorConfig {
uint32_t gainDelay;
uint32_t exposureDelay;
uint32_t sensorMetadata;
};
The generated structs will also have two constructors, a constructor that
fills all fields with the default values, and a second constructor that takes
a value for every field. The default value constructor will fill in the fields
with the specified default value if it exists. In the above example, ``gainDelay``
will be initialized to 1. If no default value is specified, then it will be
filled in as zero (or -1 for a FileDescriptor type).
All fields and constructors/destructors in these generated structs are public.
Using the IPA interface (pipeline handler)
------------------------------------------
The following headers are necessary to use an IPA in the pipeline handler
(with raspberrypi as an example):
.. code-block:: C++
#include <libcamera/ipa/raspberrypi_ipa_interface.h>
#include <libcamera/ipa/raspberrypi_ipa_proxy.h>
The first header includes definitions of the custom data structures, and
the definition of the complete IPA interface (including both the Main and
the Event IPA interfaces). The name of the header file comes from the name
of the mojom file, which in this case was raspberrypi.mojom.
The second header includes the definition of the specialized IPA proxy. It
exposes the complete IPA interface. We will see how to use it in this section.
In the pipeline handler, we first need to construct a specialized IPA proxy.
From the point of view of the pipeline handler, this is the object that is the
IPA.
To do so, we invoke the IPAManager:
.. code-block:: C++
std::unique_ptr<ipa::rpi::IPAProxyRPi> ipa_ =
IPAManager::createIPA<ipa::rpi::IPAProxyRPi>(pipe_, 1, 1);
The ipa::rpi namespace comes from the namespace that we defined in the mojo
data definition file, in the "Namespacing" section. The name of the proxy,
IPAProxyRPi, comes from the name given to the main IPA interface,
IPARPiInterface, in the "The Main IPA interface" section.
The return value of IPAManager::createIPA shall be error-checked, to confirm
that the returned pointer is not a nullptr.
After this, before initializing the IPA, slots should be connected to all of
the IPA's signals, as defined in the Event IPA interface:
.. code-block:: C++
ipa_->statsMetadataComplete.connect(this, &RPiCameraData::statsMetadataComplete);
ipa_->runIsp.connect(this, &RPiCameraData::runIsp);
ipa_->embeddedComplete.connect(this, &RPiCameraData::embeddedComplete);
ipa_->setIsp.connect(this, &RPiCameraData::setIsp);
ipa_->setStaggered.connect(this, &RPiCameraData::setStaggered);
The slot functions have a function signature based on the function definition
in the Event IPA interface. All plain old data (POD) types are as-is (with
their C++ versions, eg. uint32 -> uint32_t), and all structs are const references.
For example, for the following entry in the Event IPA interface:
.. code-block:: none
statsMetadataComplete(uint32 bufferId, ControlList controls);
A function with the following function signature shall be connected to the
signal:
.. code-block:: C++
void statsMetadataComplete(uint32_t bufferId, const ControlList &controls);
After connecting the slots to the signals, the IPA should be initialized
(using the main interface definition example from earlier):
.. code-block:: C++
IPASettings settings{};
bool metadataSupport;
int ret = ipa_->init(settings, "sensor name", &metadataSupport);
At this point, any IPA functions that were defined in the Main IPA interface
can be called as if they were regular member functions, for example (based on
the main interface definition example from earlier):
.. code-block:: C++
ipa_->start();
int ret = ipa_->configure(sensorInfo_, streamConfig, entityControls, ipaConfig, &result);
ipa_->signalStatReady(RPi::BufferMask::STATS | static_cast<unsigned int>(index));
Remember that any functions designated as asynchronous *must not* be called
before start().
Notice that for both init() and configure(), the first output parameter is a
direct return, since it is an int32, while the other output parameter is a
pointer-based output parameter.
Using the IPA interface (IPA Module)
------------------------------------
The following header is necessary to implement an IPA Module (with raspberrypi
as an example):
.. code-block:: C++
#include <libcamera/ipa/raspberrypi_ipa_interface.h>
This header includes definitions of the custom data structures, and
the definition of the complete IPA interface (including both the Main and
the Event IPA interfaces). The name of the header file comes from the name
of the mojom file, which in this case was raspberrypi.mojom.
The IPA module must implement the IPA interface class that is defined in the
header. In the case of our example, that is ipa::rpi::IPARPiInterface. The
ipa::rpi namespace comes from the namespace that we defined in the mojo data
definition file, in the "Namespacing" section. The name of the interface is the
same as the name given to the Main IPA interface.
The function signature rules are the same as for the slots in the pipeline
handler side; PODs are passed by value, and structs are passed by const
reference. For the Main IPA interface, output values are also allowed (only
for synchronous calls), so there may be output parameters as well. If the
first output parameter is a POD it will be returned by value, otherwise
it will be returned by an output parameter pointer. The second and any other
output parameters will also be returned by output parameter pointers.
For example, for the following function specification in the Main IPA interface
definition:
.. code-block:: none
configure(libcamera.IPACameraSensorInfo sensorInfo,
uint32 exampleNumber,
map<uint32, libcamera.IPAStream> streamConfig,
map<uint32, libcamera.ControlInfoMap> entityControls,
ConfigInput ipaConfig)
=> (int32 ret, ConfigOutput results);
We will need to implement a function with the following function signature:
.. code-block:: C++
int configure(const IPACameraSensorInfo &sensorInfo,
uint32_t exampleNumber,
const std::map<unsigned int, IPAStream> &streamConfig,
const std::map<unsigned int, ControlInfoMap> &entityControls,
const ipa::rpi::ConfigInput &data,
ipa::rpi::ConfigOutput *response);
The return value is int, because the first output parameter is int32. The rest
of the output parameters (in this case, only response) become output parameter
pointers. The non-POD input parameters become const references, and the POD
input parameter is passed by value.
At any time after start() and before stop() (though usually only in response to
an IPA call), the IPA may send data to the pipeline handler by emitting
signals. These signals are defined in the C++ IPA interface class (which is in
the generated and included header).
For example, for the following function defined in the Event IPA interface:
.. code-block:: none
statsMetadataComplete(uint32 bufferId, libcamera.ControlList controls);
We can emit a signal like so:
.. code-block:: C++
statsMetadataComplete.emit(bufferId & RPi::BufferMask::ID, libcameraMetadata_);

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,147 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
Tracing Guide
=============
Guide to tracing in libcamera.
Profiling vs Tracing
--------------------
Tracing is recording timestamps at specific locations. libcamera provides a
tracing facility. This guide shows how to use this tracing facility.
Tracing should not be confused with profiling, which samples execution
at periodic points in time. This can be done with other tools such as
callgrind, perf, gprof, etc., without modification to the application,
and is out of scope for this guide.
Compiling
---------
To compile libcamera with tracing support, it must be enabled through the
meson ``tracing`` option. It depends on the lttng-ust library (available in the
``liblttng-ust-dev`` package for Debian-based distributions).
By default the tracing option in meson is set to ``auto``, so if
liblttng is detected, it will be enabled by default. Conversely, if the option
is set to disabled, then libcamera will be compiled without tracing support.
Defining tracepoints
--------------------
libcamera already contains a set of tracepoints. To define additional
tracepoints, create a file
``include/libcamera/internal/tracepoints/{file}.tp``, where ``file`` is a
reasonable name related to the category of tracepoints that you wish to
define. For example, the tracepoints file for the Request object is called
``request.tp``. An entry for this file must be added in
``include/libcamera/internal/tracepoints/meson.build``.
In this tracepoints file, define your tracepoints `as mandated by lttng
<https://lttng.org/man/3/lttng-ust>`_. The header boilerplate must *not* be
included (as it will conflict with the rest of our infrastructure), and
only the tracepoint definitions (with the ``TRACEPOINT_*`` macros) should be
included.
All tracepoint providers shall be ``libcamera``. According to lttng, the
tracepoint provider should be per-project; this is the rationale for this
decision. To group tracepoint events, we recommend using
``{class_name}_{tracepoint_name}``, for example, ``request_construct`` for a
tracepoint for the constructor of the Request class.
Tracepoint arguments may take C++ objects pointers, in which case the usual
C++ namespacing rules apply. The header that contains the necessary class
definitions must be included at the top of the tracepoint provider file.
Note: the final parameter in ``TP_ARGS`` *must not* have a trailing comma, and
the parameters to ``TP_FIELDS`` are *space-separated*. Not following these will
cause compilation errors.
Using tracepoints (in libcamera)
--------------------------------
To use tracepoints in libcamera, first the header needs to be included:
``#include "libcamera/internal/tracepoints.h"``
Then to use the tracepoint:
``LIBCAMERA_TRACEPOINT({tracepoint_event}, args...)``
This macro must be used, as opposed to lttng's macros directly, because
lttng is an optional dependency of libcamera, so the code must compile and run
even when lttng is not present or when tracing is disabled.
The tracepoint provider name, as declared in the tracepoint definition, is not
included in the parameters of the tracepoint.
There are also two special tracepoints available for tracing IPA calls:
``LIBCAMERA_TRACEPOINT_IPA_BEGIN({pipeline_name}, {ipa_function})``
``LIBCAMERA_TRACEPOINT_IPA_END({pipeline_name}, {ipa_function})``
These shall be placed where an IPA function is called from the pipeline handler,
and when the pipeline handler receives the corresponding response from the IPA,
respectively. These are the tracepoints that our sample analysis script
(see "Analyzing a trace") scans for when computing statistics on IPA call time.
Using tracepoints (from an application)
---------------------------------------
As applications are not part of libcamera, but rather users of libcamera,
applications should seek their own tracing mechanisms. For ease of tracing
the application alongside tracing libcamera, it is recommended to also
`use lttng <https://lttng.org/docs/#doc-tracing-your-own-user-application>`_.
Using tracepoints (from closed-source IPA)
------------------------------------------
Similar to applications, closed-source IPAs can simply use lttng on their own,
or any other tracing mechanism if desired.
Collecting a trace
------------------
A trace can be collected fairly simply from lttng:
.. code-block:: bash
lttng create $SESSION_NAME
lttng enable-event -u libcamera:\*
lttng start
# run libcamera application
lttng stop
lttng view
lttng destroy $SESSION_NAME
See the `lttng documentation <https://lttng.org/docs/>`_ for further details.
The location of the trace file is printed when running
``lttng create $SESSION_NAME``. After destroying the session, it can still be
viewed by: ``lttng view -t $PATH_TO_TRACE``, where ``$PATH_TO_TRACE`` is the
path that was printed when the session was created. This is the same path that
is used when analyzing traces programmatically, as described in the next section.
Analyzing a trace
-----------------
As mentioned above, while an lttng tracing session exists and the trace is not
running, the trace output can be viewed as text by ``lttng view``.
The trace log can also be viewed as text using babeltrace2. See the
`lttng trace analysis documentation
<https://lttng.org/docs/#doc-viewing-and-analyzing-your-traces-bt>`_
for further details.
babeltrace2 also has a C API and python bindings that can be used to process
traces. See the
`lttng python bindings documentation <https://babeltrace.org/docs/v2.0/python/bt2/>`_
and the
`lttng C API documentation <https://babeltrace.org/docs/v2.0/libbabeltrace2/>`_
for more details.
As an example, there is a script ``utils/tracepoints/analyze-ipa-trace.py``
that gathers statistics for the time taken for an IPA function call, by
measuring the time difference between pairs of events
``libcamera:ipa_call_start`` and ``libcamera:ipa_call_finish``.

View File

@ -0,0 +1,132 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
width="297mm"
height="210mm"
viewBox="0 0 297 210"
version="1.1"
id="svg1"
inkscape:version="1.3 (0e150ed6c4, 2023-07-21)"
sodipodi:docname="rotate0.svg"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview1"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:document-units="mm"
showgrid="true"
showguides="false"
inkscape:zoom="1.4854147"
inkscape:cx="666.48052"
inkscape:cy="448.35962"
inkscape:window-width="1916"
inkscape:window-height="1040"
inkscape:window-x="0"
inkscape:window-y="38"
inkscape:window-maximized="1"
inkscape:current-layer="layer1">
<inkscape:grid
id="grid1"
units="px"
originx="0"
originy="0"
spacingx="0.26458334"
spacingy="0.26458333"
empcolor="#0000ff"
empopacity="0.25098039"
color="#0000ff"
opacity="0.1254902"
empspacing="5"
dotted="false"
gridanglex="30"
gridanglez="30"
visible="true" />
</sodipodi:namedview>
<defs
id="defs1" />
<g
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;paint-order:markers stroke fill;stroke-dasharray:none"
id="rect1"
width="152.88184"
height="119.41136"
x="77.237244"
y="81.982094" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;paint-order:markers stroke fill;stroke-dasharray:none"
id="rect2"
width="49.755535"
height="36.468258"
x="92.612343"
y="98.912964" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2-5"
width="49.755535"
height="36.468258"
x="167.25099"
y="98.912964" />
<g
id="g4"
transform="translate(-0.98582077)"
style="stroke-width:1.5875;stroke-dasharray:none">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect3"
width="40.994682"
height="43.605846"
x="134.16664"
y="157.24184" />
<ellipse
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="path3"
cx="140.15703"
cy="176.44627"
rx="1.889045"
ry="1.925626" />
</g>
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="M 244.95942,81.765726 62.444825,81.97209 154.25639,28.65633 Z"
id="path4"
sodipodi:nodetypes="cccc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;stroke-dasharray:none"
d="m 199.76751,33.368887 0.0285,21.581353"
id="path5" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;stroke-dasharray:none"
d="m 215.59016,33.189206 0.0959,31.330304"
id="path6" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 194.42835,33.189356 25.2821,-0.220612"
id="path7" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;stroke-dasharray:none"
d="m 195.19248,33.096339 -0.0701,-5.375793 23.77787,-0.05613 0.0553,5.315811"
id="path8" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 194.20874,25.616264 25.25485,-0.02536"
id="path7-5"
sodipodi:nodetypes="cc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 195.03436,26.298566 -0.0455,-5.426692 23.77787,-0.05613 0.0553,5.315811"
id="path8-9"
sodipodi:nodetypes="cccc" />
</g>
</svg>

After

Width:  |  Height:  |  Size: 4.8 KiB

View File

@ -0,0 +1,135 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
width="297mm"
height="210mm"
viewBox="0 0 297 210"
version="1.1"
id="svg1"
inkscape:version="1.3 (0e150ed6c4, 2023-07-21)"
sodipodi:docname="rotate0Mirror.svg"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview1"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:document-units="mm"
showgrid="true"
showguides="false"
inkscape:zoom="0.82900578"
inkscape:cx="599.51331"
inkscape:cy="579.00682"
inkscape:window-width="1916"
inkscape:window-height="1040"
inkscape:window-x="0"
inkscape:window-y="38"
inkscape:window-maximized="1"
inkscape:current-layer="layer1">
<inkscape:grid
id="grid1"
units="px"
originx="0"
originy="0"
spacingx="0.26458334"
spacingy="0.26458333"
empcolor="#0000ff"
empopacity="0.25098039"
color="#0000ff"
opacity="0.1254902"
empspacing="5"
dotted="false"
gridanglex="30"
gridanglez="30"
visible="true" />
</sodipodi:namedview>
<defs
id="defs1" />
<g
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect1"
width="152.88184"
height="119.41136"
x="-230.13463"
y="81.982094"
transform="scale(-1,1)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2"
width="49.755535"
height="36.468258"
x="-214.75954"
y="98.912964"
transform="scale(-1,1)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2-5"
width="49.755535"
height="36.468258"
x="-140.12088"
y="98.912964"
transform="scale(-1,1)" />
<g
id="g4"
transform="matrix(-1,0,0,1,308.35769,0)"
style="stroke-width:1.5875;stroke-dasharray:none">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect3"
width="40.994682"
height="43.605846"
x="134.16664"
y="157.24184" />
<ellipse
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="path3"
cx="140.15703"
cy="176.44627"
rx="1.889045"
ry="1.925626" />
</g>
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="M 62.412454,81.765726 244.92705,81.97209 153.11548,28.65633 Z"
id="path4"
sodipodi:nodetypes="cccc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 107.60436,33.368887 -0.0285,21.581353"
id="path5" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 91.781714,33.189206 -0.0959,31.330304"
id="path6" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="M 112.94352,33.189356 87.661424,32.968744"
id="path7" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 112.17939,33.096339 0.0701,-5.375793 -23.777866,-0.05613 -0.0553,5.315811"
id="path8" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="M 113.16313,25.616264 87.908284,25.590904"
id="path7-5"
sodipodi:nodetypes="cc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 112.33751,26.298566 0.0455,-5.426692 -23.777866,-0.05613 -0.0553,5.315811"
id="path8-9"
sodipodi:nodetypes="cccc" />
</g>
</svg>

After

Width:  |  Height:  |  Size: 4.9 KiB

View File

@ -0,0 +1,135 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
width="297mm"
height="210mm"
viewBox="0 0 297 210"
version="1.1"
id="svg1"
inkscape:version="1.3 (0e150ed6c4, 2023-07-21)"
sodipodi:docname="rotate180.svg"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview1"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:document-units="mm"
showgrid="true"
showguides="false"
inkscape:zoom="0.94272086"
inkscape:cx="467.79489"
inkscape:cy="423.24299"
inkscape:window-width="1916"
inkscape:window-height="1040"
inkscape:window-x="0"
inkscape:window-y="38"
inkscape:window-maximized="1"
inkscape:current-layer="layer1">
<inkscape:grid
id="grid1"
units="px"
originx="0"
originy="0"
spacingx="0.26458334"
spacingy="0.26458333"
empcolor="#0000ff"
empopacity="0.25098039"
color="#0000ff"
opacity="0.1254902"
empspacing="5"
dotted="false"
gridanglex="30"
gridanglez="30"
visible="true" />
</sodipodi:namedview>
<defs
id="defs1" />
<g
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect1"
width="152.88184"
height="119.41136"
x="-230.13461"
y="-140.22527"
transform="scale(-1)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2"
width="49.755535"
height="36.468258"
x="-214.75951"
y="-123.2944"
transform="scale(-1)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2-5"
width="49.755535"
height="36.468258"
x="-140.12086"
y="-123.2944"
transform="scale(-1)" />
<g
id="g4"
transform="rotate(180,154.17884,111.10368)"
style="stroke-width:1.5875;stroke-dasharray:none">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect3"
width="40.994682"
height="43.605846"
x="134.16664"
y="157.24184" />
<ellipse
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="path3"
cx="140.15703"
cy="176.44627"
rx="1.889045"
ry="1.925626" />
</g>
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 62.412437,140.44163 182.514593,-0.20636 -91.81156,53.31576 z"
id="path4"
sodipodi:nodetypes="cccc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 107.60435,188.83847 -0.0285,-21.58135"
id="path5" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 91.781697,189.01815 -0.0959,-31.3303"
id="path6" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 112.94351,189.018 -25.282103,0.22061"
id="path7" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 112.17938,189.11102 0.0701,5.37579 -23.777873,0.0561 -0.0553,-5.31581"
id="path8" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 113.16312,196.59109 -25.254853,0.0254"
id="path7-5"
sodipodi:nodetypes="cc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 112.3375,195.90879 0.0455,5.42669 -23.777873,0.0561 -0.0553,-5.31581"
id="path8-9"
sodipodi:nodetypes="cccc" />
</g>
</svg>

After

Width:  |  Height:  |  Size: 4.9 KiB

View File

@ -0,0 +1,135 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
width="297mm"
height="210mm"
viewBox="0 0 297 210"
version="1.1"
id="svg1"
inkscape:version="1.3 (0e150ed6c4, 2023-07-21)"
sodipodi:docname="rotate180Mirror.svg"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview1"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:document-units="mm"
showgrid="true"
showguides="false"
inkscape:zoom="0.94272086"
inkscape:cx="467.79489"
inkscape:cy="423.24299"
inkscape:window-width="1916"
inkscape:window-height="1040"
inkscape:window-x="0"
inkscape:window-y="38"
inkscape:window-maximized="1"
inkscape:current-layer="layer1">
<inkscape:grid
id="grid1"
units="px"
originx="0"
originy="0"
spacingx="0.26458334"
spacingy="0.26458333"
empcolor="#0000ff"
empopacity="0.25098039"
color="#0000ff"
opacity="0.1254902"
empspacing="5"
dotted="false"
gridanglex="30"
gridanglez="30"
visible="true" />
</sodipodi:namedview>
<defs
id="defs1" />
<g
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect1"
width="152.88184"
height="119.41136"
x="77.237228"
y="-140.22527"
transform="scale(1,-1)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2"
width="49.755535"
height="36.468258"
x="92.612335"
y="-123.2944"
transform="scale(1,-1)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2-5"
width="49.755535"
height="36.468258"
x="167.25098"
y="-123.2944"
transform="scale(1,-1)" />
<g
id="g4"
transform="matrix(1,0,0,-1,-0.98584226,222.20736)"
style="stroke-width:1.5875;stroke-dasharray:none">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect3"
width="40.994682"
height="43.605846"
x="134.16664"
y="157.24184" />
<ellipse
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="path3"
cx="140.15703"
cy="176.44627"
rx="1.889045"
ry="1.925626" />
</g>
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="M 244.9594,140.44163 62.444808,140.23527 154.25637,193.55103 Z"
id="path4"
sodipodi:nodetypes="cccc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 199.76749,188.83847 0.0285,-21.58135"
id="path5" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 215.59014,189.01815 0.0959,-31.3303"
id="path6" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 194.42833,189.018 25.2821,0.22061"
id="path7" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 195.19246,189.11102 -0.0701,5.37579 23.77787,0.0561 0.0553,-5.31581"
id="path8" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 194.20872,196.59109 25.25485,0.0254"
id="path7-5"
sodipodi:nodetypes="cc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 195.03434,195.90879 -0.0455,5.42669 23.77787,0.0561 0.0553,-5.31581"
id="path8-9"
sodipodi:nodetypes="cccc" />
</g>
</svg>

After

Width:  |  Height:  |  Size: 4.9 KiB

View File

@ -0,0 +1,135 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
width="297mm"
height="210mm"
viewBox="0 0 297 210"
version="1.1"
id="svg1"
inkscape:version="1.3 (0e150ed6c4, 2023-07-21)"
sodipodi:docname="rotate270.svg"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview1"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:document-units="mm"
showgrid="true"
showguides="false"
inkscape:zoom="0.94272086"
inkscape:cx="467.26451"
inkscape:cy="423.24299"
inkscape:window-width="1916"
inkscape:window-height="1040"
inkscape:window-x="0"
inkscape:window-y="38"
inkscape:window-maximized="1"
inkscape:current-layer="layer1">
<inkscape:grid
id="grid1"
units="px"
originx="0"
originy="0"
spacingx="0.26458334"
spacingy="0.26458333"
empcolor="#0000ff"
empopacity="0.25098039"
color="#0000ff"
opacity="0.1254902"
empspacing="5"
dotted="false"
gridanglex="30"
gridanglez="30"
visible="true" />
</sodipodi:namedview>
<defs
id="defs1" />
<g
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect1"
width="152.88184"
height="119.41136"
x="-187.55237"
y="124.56432"
transform="rotate(-90)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2"
width="49.755535"
height="36.468258"
x="-172.17726"
y="141.49518"
transform="rotate(-90)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2-5"
width="49.755535"
height="36.468258"
x="-97.538612"
y="141.49518"
transform="rotate(-90)" />
<g
id="g4"
transform="rotate(-90,154.17883,111.5966)"
style="stroke-width:1.5875;stroke-dasharray:none">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect3"
width="40.994682"
height="43.605846"
x="134.16664"
y="157.24184" />
<ellipse
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="path3"
cx="140.15703"
cy="176.44627"
rx="1.889045"
ry="1.925626" />
</g>
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="M 124.34796,19.830188 124.55432,202.34478 71.238559,110.53322 Z"
id="path4"
sodipodi:nodetypes="cccc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 75.951119,65.022101 21.58135,-0.0285"
id="path5" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 75.771439,49.199448 31.330301,-0.0959"
id="path6" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="M 75.771589,70.361261 75.550979,45.079158"
id="path7" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 75.678569,69.597131 -5.37579,0.0701 -0.0561,-23.777873 5.31581,-0.0553"
id="path8" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 68.198499,70.580871 -0.0254,-25.254853"
id="path7-5"
sodipodi:nodetypes="cc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 68.880799,69.755251 -5.42669,0.0455 -0.0561,-23.777873 5.31581,-0.0553"
id="path8-9"
sodipodi:nodetypes="cccc" />
</g>
</svg>

After

Width:  |  Height:  |  Size: 4.9 KiB

View File

@ -0,0 +1,135 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
width="297mm"
height="210mm"
viewBox="0 0 297 210"
version="1.1"
id="svg1"
inkscape:version="1.3 (0e150ed6c4, 2023-07-21)"
sodipodi:docname="rotate270Mirror.svg"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview1"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:document-units="mm"
showgrid="true"
showguides="false"
inkscape:zoom="0.94272086"
inkscape:cx="467.79489"
inkscape:cy="423.24299"
inkscape:window-width="1916"
inkscape:window-height="1040"
inkscape:window-x="0"
inkscape:window-y="38"
inkscape:window-maximized="1"
inkscape:current-layer="layer1">
<inkscape:grid
id="grid1"
units="px"
originx="0"
originy="0"
spacingx="0.26458334"
spacingy="0.26458333"
empcolor="#0000ff"
empopacity="0.25098039"
color="#0000ff"
opacity="0.1254902"
empspacing="5"
dotted="false"
gridanglex="30"
gridanglez="30"
visible="true" />
</sodipodi:namedview>
<defs
id="defs1" />
<g
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect1"
width="152.88184"
height="119.41136"
x="-187.55237"
y="-182.80751"
transform="matrix(0,-1,-1,0,0,0)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2"
width="49.755535"
height="36.468258"
x="-172.17726"
y="-165.87666"
transform="matrix(0,-1,-1,0,0,0)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2-5"
width="49.755535"
height="36.468258"
x="-97.538612"
y="-165.87666"
transform="matrix(0,-1,-1,0,0,0)" />
<g
id="g4"
transform="matrix(0,-1,-1,0,264.78961,265.77543)"
style="stroke-width:1.5875;stroke-dasharray:none">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect3"
width="40.994682"
height="43.605846"
x="134.16664"
y="157.24184" />
<ellipse
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="path3"
cx="140.15703"
cy="176.44627"
rx="1.889045"
ry="1.925626" />
</g>
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 183.02388,19.830188 -0.20636,182.514592 53.31576,-91.81156 z"
id="path4"
sodipodi:nodetypes="cccc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 231.42072,65.022101 -21.58135,-0.0285"
id="path5" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 231.6004,49.199448 -31.3303,-0.0959"
id="path6" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 231.60025,70.361261 0.22061,-25.282103"
id="path7" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 231.69327,69.597131 5.37579,0.0701 0.0561,-23.777873 -5.31581,-0.0553"
id="path8" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 239.17334,70.580871 0.0254,-25.254853"
id="path7-5"
sodipodi:nodetypes="cc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 238.49104,69.755251 5.42669,0.0455 0.0561,-23.777873 -5.31581,-0.0553"
id="path8-9"
sodipodi:nodetypes="cccc" />
</g>
</svg>

After

Width:  |  Height:  |  Size: 4.9 KiB

View File

@ -0,0 +1,135 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
width="297mm"
height="210mm"
viewBox="0 0 297 210"
version="1.1"
id="svg1"
inkscape:version="1.3 (0e150ed6c4, 2023-07-21)"
sodipodi:docname="rotate90.svg"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview1"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:document-units="mm"
showgrid="true"
showguides="false"
inkscape:zoom="0.94272086"
inkscape:cx="467.26451"
inkscape:cy="423.24299"
inkscape:window-width="1916"
inkscape:window-height="1040"
inkscape:window-x="0"
inkscape:window-y="38"
inkscape:window-maximized="1"
inkscape:current-layer="layer1">
<inkscape:grid
id="grid1"
units="px"
originx="0"
originy="0"
spacingx="0.26458334"
spacingy="0.26458333"
empcolor="#0000ff"
empopacity="0.25098039"
color="#0000ff"
opacity="0.1254902"
empspacing="5"
dotted="false"
gridanglex="30"
gridanglez="30"
visible="true" />
</sodipodi:namedview>
<defs
id="defs1" />
<g
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect1"
width="152.88184"
height="119.41136"
x="34.65498"
y="-182.80751"
transform="rotate(90)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2"
width="49.755535"
height="36.468258"
x="50.030079"
y="-165.87665"
transform="rotate(90)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2-5"
width="49.755535"
height="36.468258"
x="124.66872"
y="-165.87665"
transform="rotate(90)" />
<g
id="g4"
transform="rotate(90,154.17885,110.61076)"
style="stroke-width:1.5875;stroke-dasharray:none">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect3"
width="40.994682"
height="43.605846"
x="134.16664"
y="157.24184" />
<ellipse
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="path3"
cx="140.15703"
cy="176.44627"
rx="1.889045"
ry="1.925626" />
</g>
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 183.02388,202.37715 -0.20636,-182.51459 53.31576,91.81156 z"
id="path4"
sodipodi:nodetypes="cccc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 231.42072,157.18524 -21.58135,0.0285"
id="path5" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 231.6004,173.00789 -31.3303,0.0959"
id="path6" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 231.60025,151.84608 0.22061,25.2821"
id="path7" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 231.69327,152.61021 5.37579,-0.0701 0.0561,23.77787 -5.31581,0.0553"
id="path8" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 239.17334,151.62647 0.0254,25.25485"
id="path7-5"
sodipodi:nodetypes="cc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 238.49104,152.45209 5.42669,-0.0455 0.0561,23.77787 -5.31581,0.0553"
id="path8-9"
sodipodi:nodetypes="cccc" />
</g>
</svg>

After

Width:  |  Height:  |  Size: 4.8 KiB

View File

@ -0,0 +1,135 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
width="297mm"
height="210mm"
viewBox="0 0 297 210"
version="1.1"
id="svg1"
inkscape:version="1.3 (0e150ed6c4, 2023-07-21)"
sodipodi:docname="rotate90Mirror.svg"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview1"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:document-units="mm"
showgrid="true"
showguides="false"
inkscape:zoom="0.94272086"
inkscape:cx="467.79489"
inkscape:cy="423.24299"
inkscape:window-width="1916"
inkscape:window-height="1040"
inkscape:window-x="0"
inkscape:window-y="38"
inkscape:window-maximized="1"
inkscape:current-layer="layer1">
<inkscape:grid
id="grid1"
units="px"
originx="0"
originy="0"
spacingx="0.26458334"
spacingy="0.26458333"
empcolor="#0000ff"
empopacity="0.25098039"
color="#0000ff"
opacity="0.1254902"
empspacing="5"
dotted="false"
gridanglex="30"
gridanglez="30"
visible="true" />
</sodipodi:namedview>
<defs
id="defs1" />
<g
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect1"
width="152.88184"
height="119.41136"
x="34.65498"
y="124.56432"
transform="matrix(0,1,1,0,0,0)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2"
width="49.755535"
height="36.468258"
x="50.030079"
y="141.49519"
transform="matrix(0,1,1,0,0,0)" />
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect2-5"
width="49.755535"
height="36.468258"
x="124.66872"
y="141.49519"
transform="matrix(0,1,1,0,0,0)" />
<g
id="g4"
transform="matrix(0,1,1,0,42.582224,-43.56809)"
style="stroke-width:1.5875;stroke-dasharray:none">
<rect
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="rect3"
width="40.994682"
height="43.605846"
x="134.16664"
y="157.24184" />
<ellipse
style="fill:none;stroke:#000000;stroke-width:1.5875;stroke-dasharray:none;paint-order:markers stroke fill"
id="path3"
cx="140.15703"
cy="176.44627"
rx="1.889045"
ry="1.925626" />
</g>
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="M 124.34795,202.37715 124.55431,19.86256 71.238554,111.67412 Z"
id="path4"
sodipodi:nodetypes="cccc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 75.951114,157.18524 21.58135,0.0285"
id="path5" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 75.771434,173.00789 31.330296,0.0959"
id="path6" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 75.771584,151.84608 -0.22061,25.2821"
id="path7" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 75.678564,152.61021 -5.37579,-0.0701 -0.0561,23.77787 5.31581,0.0553"
id="path8" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 68.198494,151.62647 -0.0254,25.25485"
id="path7-5"
sodipodi:nodetypes="cc" />
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.5875;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
d="m 68.880794,152.45209 -5.42669,-0.0455 -0.0561,23.77787 5.31581,0.0553"
id="path8-9"
sodipodi:nodetypes="cccc" />
</g>
</svg>

After

Width:  |  Height:  |  Size: 4.9 KiB

View File

@ -0,0 +1,27 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
.. Front page matter is defined in the project README file.
.. include:: ../README.rst
:start-after: .. section-begin-libcamera
:end-before: .. section-end-libcamera
.. toctree::
:maxdepth: 1
:caption: Contents:
Home <self>
Docs <docs>
Contribute <contributing>
Getting Started <getting-started>
Developer Guide <guides/introduction>
Application Writer's Guide <guides/application-developer>
Pipeline Handler Writer's Guide <guides/pipeline-handler>
IPA Writer's guide <guides/ipa>
Tracing guide <guides/tracing>
Environment variables <environment_variables>
Sensor driver requirements <sensor_driver_requirements>
Lens driver requirements <lens_driver_requirements>
Python Bindings <python-bindings>
Camera Sensor Model <camera-sensor-model>
SoftwareISP Benchmarking <software-isp-benchmarking>

View File

@ -0,0 +1,27 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
.. _lens-driver-requirements:
Lens Driver Requirements
========================
libcamera handles lens devices in the CameraLens class and defines
a consistent interface through its API towards other library components.
The CameraLens class uses the V4L2 subdev kernel API to interface with the
camera lens through a sub-device exposed to userspace by the lens driver.
In order for libcamera to be fully operational and provide all the required
information to interface with the camera lens to applications and pipeline
handlers, a set of mandatory features the driver has to support has been defined.
Mandatory Requirements
----------------------
The lens driver is assumed to be fully compliant with the V4L2 specification.
The lens driver shall support the following V4L2 controls:
* `V4L2_CID_FOCUS_ABSOLUTE`_
.. _V4L2_CID_FOCUS_ABSOLUTE: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/ext-ctrls-camera.html

View File

@ -0,0 +1,105 @@
# SPDX-License-Identifier: CC0-1.0
doc_install_dir = get_option('datadir') / 'doc' / 'libcamera-@0@'.format(libcamera_version)
#
# Doxygen
#
doxygen = find_program('doxygen', required : get_option('documentation'))
dot = find_program('dot', required : get_option('documentation'))
if doxygen.found() and dot.found()
cdata = configuration_data()
cdata.set('VERSION', 'v@0@'.format(libcamera_git_version))
cdata.set('TOP_SRCDIR', meson.project_source_root())
cdata.set('TOP_BUILDDIR', meson.project_build_root())
cdata.set('OUTPUT_DIR', meson.current_build_dir())
cdata.set('WARN_AS_ERROR', get_option('doc_werror') ? 'YES' : 'NO')
doxygen_predefined = []
foreach key : config_h.keys()
doxygen_predefined += '@0@=@1@'.format(key, config_h.get(key))
endforeach
cdata.set('PREDEFINED', ' \\\n\t\t\t '.join(doxygen_predefined))
doxyfile = configure_file(input : 'Doxyfile.in',
output : 'Doxyfile',
configuration : cdata)
doxygen_input = [
doxyfile,
libcamera_base_headers,
libcamera_base_sources,
libcamera_internal_headers,
libcamera_ipa_headers,
libcamera_ipa_interfaces,
libcamera_public_headers,
libcamera_sources,
libipa_headers,
libipa_sources,
]
if is_variable('ipu3_ipa_sources')
doxygen_input += [ipu3_ipa_sources]
endif
custom_target('doxygen',
input : doxygen_input,
output : 'api-html',
command : [doxygen, doxyfile],
install : true,
install_dir : doc_install_dir,
install_tag : 'doc')
endif
#
# Sphinx
#
sphinx = find_program('sphinx-build-3', required : false)
if not sphinx.found()
sphinx = find_program('sphinx-build', required : get_option('documentation'))
endif
if sphinx.found()
docs_sources = [
'camera-sensor-model.rst',
'code-of-conduct.rst',
'coding-style.rst',
'conf.py',
'contributing.rst',
'docs.rst',
'environment_variables.rst',
'guides/application-developer.rst',
'guides/introduction.rst',
'guides/ipa.rst',
'guides/pipeline-handler.rst',
'guides/tracing.rst',
'index.rst',
'lens_driver_requirements.rst',
'python-bindings.rst',
'sensor_driver_requirements.rst',
'software-isp-benchmarking.rst',
'../README.rst',
]
release = 'release=v' + libcamera_git_version
custom_target('documentation',
command : [sphinx, '-D', release, '-q', '-W', '-b', 'html',
meson.current_source_dir(), '@OUTPUT@'],
input : docs_sources,
output : 'html',
build_by_default : true,
install : true,
install_dir : doc_install_dir,
install_tag : 'doc')
custom_target('documentation-linkcheck',
command : [sphinx, '-W', '-b', 'linkcheck', meson.current_source_dir(), '@OUTPUT@'],
build_always_stale : true,
input : docs_sources,
output : 'linkcheck')
endif

View File

@ -0,0 +1,70 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
.. _python-bindings:
Python Bindings for libcamera
=============================
.. warning::
The bindings are under work, and the API will change.
Differences to the C++ API
--------------------------
As a rule of thumb the bindings try to follow the C++ API when possible. This
chapter lists the differences.
Mostly these differences fall under two categories:
1. Differences caused by the inherent differences between C++ and Python.
These differences are usually caused by the use of threads or differences in
C++ vs Python memory management.
2. Differences caused by the code being work-in-progress. It's not always
trivial to create a binding in a satisfying way, and the current bindings
contain simplified versions of the C++ API just to get forward. These
differences are expected to eventually go away.
Coding Style
------------
The C++ code for the bindings follows the libcamera coding style as much as
possible. Note that the indentation does not quite follow the clang-format
style, as clang-format makes a mess of the style used.
The API visible to the Python side follows the Python style as much as possible.
This means that e.g. ``Camera::generateConfiguration`` maps to
``Camera.generate_configuration``.
CameraManager
-------------
The Python API provides a singleton CameraManager via ``CameraManager.singleton()``.
There is no need to start or stop the CameraManager.
Handling Completed Requests
---------------------------
The Python bindings do not expose the ``Camera::requestCompleted`` signal
directly as the signal is invoked from another thread and it has real-time
constraints. Instead the bindings queue the completed requests internally and
use an eventfd to inform the user that there are completed requests.
The user can wait on the eventfd, and upon getting an event, use
``CameraManager.get_ready_requests()`` to clear the eventfd event and to get
the completed requests.
Controls & Properties
---------------------
The classes related to controls and properties are rather complex to implement
directly in the Python bindings. There are some simplifications in the Python
bindings:
- There is no ControlValue class. Python objects are automatically converted
to ControlValues and vice versa.
- There is no ControlList class. A Python dict with ControlId keys and Python
object values is used instead.
- There is no ControlInfoMap class. A Python dict with ControlId keys and
ControlInfo values is used instead.

View File

@ -0,0 +1,93 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
.. _sensor-driver-requirements:
Sensor Driver Requirements
==========================
libcamera handles imaging devices in the CameraSensor class and defines
a consistent interface through its API towards other library components.
The CameraSensor class uses the V4L2 subdev kernel API to interface with the
camera sensor through one or multiple sub-devices exposed in userspace by
the sensor driver.
In order for libcamera to be fully operational and provide all the required
information to interface with the camera sensor to applications and pipeline
handlers, a set of mandatory and optional features the driver has to support
has been defined.
Mandatory Requirements
----------------------
The sensor driver is assumed to be fully compliant with the V4L2 specification.
For RAW sensors, the sensor driver shall support the following V4L2 controls:
* `V4L2_CID_ANALOGUE_GAIN`_
* `V4L2_CID_EXPOSURE`_
* `V4L2_CID_HBLANK`_
* `V4L2_CID_PIXEL_RATE`_
* `V4L2_CID_VBLANK`_
.. _V4L2_CID_ANALOGUE_GAIN: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/ext-ctrls-image-source.html
.. _V4L2_CID_EXPOSURE: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/control.html
.. _V4L2_CID_HBLANK: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/ext-ctrls-image-source.html
.. _V4L2_CID_PIXEL_RATE: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/ext-ctrls-image-process.html
.. _V4L2_CID_VBLANK: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/ext-ctrls-image-source.html
The ``ANALOGUE_GAIN`` control units are sensor-specific. libcamera requires
a sensor-specific CameraSensorHelper implementation to translate between the
sensor specific ``gain code`` and the analogue ``gain value`` expressed as an
absolute number as defined by ``controls::AnalogueGain``.
While V4L2 doesn't specify a unit for the ``EXPOSURE`` control, libcamera
requires it to be expressed as a number of image lines. Camera sensor drivers
that do not comply with this requirement will need to be adapted or will produce
incorrect results.
The ``HBLANK``, ``PIXEL_RATE`` and ``VBLANK`` controls are used to compute the
sensor output timings.
Optional Requirements
---------------------
The sensor driver should support the following V4L2 controls:
* `V4L2_CID_CAMERA_ORIENTATION`_
* `V4L2_CID_CAMERA_SENSOR_ROTATION`_
.. _V4L2_CID_CAMERA_ORIENTATION: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/ext-ctrls-camera.html
.. _V4L2_CID_CAMERA_SENSOR_ROTATION: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/ext-ctrls-camera.html
The controls are used to register the camera location and rotation.
In order to support rotating the image the sensor driver should support
* `V4L2_CID_HFLIP`_
* `V4L2_CID_VFLIP`_
.. _V4L2_CID_HFLIP: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/control.html
.. _V4L2_CID_VFLIP: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/control.html
The controls must be writable from userspace. In case of a RAW Bayer sensors,
drivers should correctly report if vertical/horizontal flips modify the Bayer
pattern ordering by reporting the `V4L2_CTRL_FLAG_MODIFY_LAYOUT` control flag.
The sensor driver should implement support for the V4L2 Selection API,
specifically it should implement support for the
`VIDIOC_SUBDEV_G_SELECTION`_ ioctl with support for the following selection
targets:
.. _VIDIOC_SUBDEV_G_SELECTION: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/vidioc-subdev-g-selection.html#c.V4L.VIDIOC_SUBDEV_G_SELECTION
* `V4L2_SEL_TGT_CROP_BOUNDS`_ to report the readable pixel array area size
* `V4L2_SEL_TGT_CROP_DEFAULT`_ to report the active pixel array area size
* `V4L2_SEL_TGT_CROP`_ to report the analogue selection rectangle
Support for the selection API is scheduled to become a mandatory feature in
the near future.
.. _V4L2_SEL_TGT_CROP_BOUNDS: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/v4l2-selection-targets.html
.. _V4L2_SEL_TGT_CROP_DEFAULT: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/v4l2-selection-targets.html
.. _V4L2_SEL_TGT_CROP: https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/v4l2-selection-targets.html

File diff suppressed because it is too large Load Diff

After

Width:  |  Height:  |  Size: 171 KiB

File diff suppressed because it is too large Load Diff

After

Width:  |  Height:  |  Size: 80 KiB

View File

@ -0,0 +1,77 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
.. _software-isp-benchmarking:
Software ISP benchmarking
=========================
The Software ISP is particularly sensitive to performance regressions therefore
it is a good idea to always benchmark the Software ISP before and after making
changes to it and ensure that there are no performance regressions.
DebayerCpu class builtin benchmark
----------------------------------
The DebayerCpu class has a builtin benchmark. This benchmark measures the time
spent on processing (collecting statistics and debayering) only, it does not
measure the time spent on capturing or outputting the frames.
The builtin benchmark always runs. So this can be used by simply running "cam"
or "qcam" with a pipeline using the Software ISP.
When it runs it will skip measuring the first 30 frames to allow the caches and
the CPU temperature (turbo-ing) to warm-up and then it measures 30 fps and shows
the total and per frame processing time using an info level log message:
.. code-block:: text
INFO Debayer debayer_cpu.cpp:907 Processed 30 frames in 244317us, 8143 us/frame
To get stable measurements it is advised to disable any other processes which
may cause significant CPU usage (e.g. disable wifi, bluetooth and browsers).
When possible it is also advisable to disable CPU turbo-ing and
frequency-scaling.
For example when benchmarking on a Lenovo ThinkPad X1 Yoga Gen 8, with the
charger plugged in, the CPU can be fixed to run at 2 GHz using:
.. code-block:: shell
sudo x86_energy_perf_policy --turbo-enable 0
sudo cpupower frequency-set -d 2GHz -u 2GHz
with these settings the builtin bench reports a processing time of ~7.8ms/frame
on this laptop for FHD SGRBG10 (unpacked) bayer data.
Measuring power consumption
---------------------------
Since the Software ISP is often used on mobile devices it is also important to
measure power consumption and ensure that that does not regress.
For example to measure power consumption on a Lenovo ThinkPad X1 Yoga Gen 8 it
needs to be running on battery and it should be configured with its
platform-profile (/sys/firmware/acpi/platform_profile) set to balanced and with
its default turbo and frequency-scaling behavior to match real world usage.
Then start qcam to capture a FHD picture at 30 fps and position the qcam window
so that it is fully visible. After this run the following command to monitor the
power consumption:
.. code-block:: shell
watch -n 10 cat /sys/class/power_supply/BAT0/power_now /sys/class/hwmon/hwmon6/fan?_input
Note this not only measures the power consumption in µW it also monitors the
speed of this laptop's 2 fans. This is important because depending on the
ambient temperature the 2 fans may spin up while testing and this will cause an
additional power consumption of approx. 0.5 W messing up the measurement.
After starting qcam + the watch command let the laptop sit without using it for
2 minutes for the readings to stabilize. Then check that the fans have not
turned on and manually take a couple of consecutive power readings and average
these.
On the example Lenovo ThinkPad X1 Yoga Gen 8 laptop this results in a measured
power consumption of approx. 13 W while running qcam versus approx. 4-5 W while
setting idle with its OLED panel on.

View File

@ -0,0 +1,14 @@
{#
SPDX-License-Identifier: CC-BY-SA-4.0
#}
<footer>
<div id="signature">
{%- if show_copyright %}
{%- if hasdoc('copyright') %}
{% trans path=pathto('copyright'), copyright=copyright|e %}&copy; <a href="{{ path }}">Copyright</a> {{ copyright }}.{% endtrans %}
{%- else %}
{% trans copyright=copyright|e %}&copy; Copyright {{ copyright }}.{% endtrans %}
{%- endif %}
{%- endif %}
</div>
</footer>

View File

@ -0,0 +1,109 @@
{#
SPDX-License-Identifier: CC-BY-SA-4.0
#}
{# TEMPLATE VAR SETTINGS #}
{%- set url_root = pathto('', 1) %}
{%- if url_root == '#' %}{% set url_root = '' %}{% endif %}
{%- if not embedded and docstitle %}
{%- set titlesuffix = " &mdash; "|safe + docstitle|e %}
{%- else %}
{%- set titlesuffix = "" %}
{%- endif %}
<!DOCTYPE html>
<head>
<meta charset="utf-8">
{{ metatags }}
<meta name="viewport" content="width=device-width, initial-scale=1.0">
{% block htmltitle %}
<title>{{ title|striptags|e }}{{ titlesuffix }}</title>
{% endblock %}
{# FAVICON #}
{% if favicon %}
<link rel="shortcut icon" href="{{ pathto('_static/' + favicon, 1) }}"/>
{% endif %}
{# CSS #}
{# OPENSEARCH #}
{% if not embedded %}
{% if use_opensearch %}
<link rel="search" type="application/opensearchdescription+xml" title="{% trans docstitle=docstitle|e %}Search within {{ docstitle }}{% endtrans %}" href="{{ pathto('_static/opensearch.xml', 1) }}"/>
{% endif %}
{% endif %}
{% for cssfile in css_files %}
<link rel="stylesheet" href="{{ pathto(cssfile, 1) }}" type="text/css" />
{% endfor %}
{% for cssfile in extra_css_files %}
<link rel="stylesheet" href="{{ pathto(cssfile, 1) }}" type="text/css" />
{% endfor %}
{%- block linktags %}
{%- if hasdoc('about') %}
<link rel="author" title="{{ _('About these documents') }}"
href="{{ pathto('about') }}"/>
{%- endif %}
{%- if hasdoc('genindex') %}
<link rel="index" title="{{ _('Index') }}"
href="{{ pathto('genindex') }}"/>
{%- endif %}
{%- if hasdoc('search') %}
<link rel="search" title="{{ _('Search') }}" href="{{ pathto('search') }}"/>
{%- endif %}
{%- if hasdoc('copyright') %}
<link rel="copyright" title="{{ _('Copyright') }}" href="{{ pathto('copyright') }}"/>
{%- endif %}
<link rel="top" title="{{ docstitle|e }}" href="{{ pathto('index') }}"/>
{%- if parents %}
<link rel="up" title="{{ parents[-1].title|striptags|e }}" href="{{ parents[-1].link|e }}"/>
{%- endif %}
{%- if next %}
<link rel="next" title="{{ next.title|striptags|e }}" href="{{ next.link|e }}"/>
{%- endif %}
{%- if prev %}
<link rel="prev" title="{{ prev.title|striptags|e }}" href="{{ prev.link|e }}"/>
{%- endif %}
{%- endblock %}
{%- block extrahead %} {% endblock %}
</head>
<body role="document">
<header>
<div id="navbar">
<div class="navbar-brand">
<div class="navbar-logo"> _
+-/ \-+
| (o) |
+-----+</div>
<div class="navbar-name"><span class="text-light">lib</span>camera</div>
</div>
<div class="navbar">
{{ toctree(maxdepth=1) }}
<div class="searchbox" role="search">
<form class="search" action="{{ pathto('search') }}" method="get">
<input type="text" name="q" />
<input type="submit" value="Go" />
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</form>
</div>
</div>
</div>
</header>
<div id="content">
{# PAGE CONTENT #}
<div class="block">
{% block body %}{% endblock %}
</div>
</div>
{% include "footer.html" %}
</body>
</html>

View File

@ -0,0 +1,63 @@
{#
SPDX-License-Identifier: CC-BY-SA-4.0
#}
{#
basic/search.html
~~~~~~~~~~~~~~~~~
Template for the search page.
:copyright: Copyright 2007-2018 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
#}
{%- extends "layout.html" %}
{% block extrahead %}
<script type="text/javascript" id="documentation_options" data-url_root="{{ pathto('', 1) }}" src="{{ pathto('_static/documentation_options.js', 1) }}"></script>
{%- for scriptfile in script_files %}
<script type="text/javascript" src="{{ pathto(scriptfile, 1) }}"></script>
{%- endfor %}
<script type="text/javascript" src="_static/searchtools.js"></script>
<script type="text/javascript">
jQuery(function() { Search.loadIndex("{{ pathto('searchindex.js', 1) }}"); });
</script>
{# this is used when loading the search index using $.ajax fails,
such as on Chrome for documents on localhost #}
<script type="text/javascript" id="searchindexloader"></script>
{% endblock %}
{% block body %}
<h1 id="search-documentation">{{ _('Search') }}</h1>
<div id="fallback" class="admonition warning">
<script type="text/javascript">$('#fallback').hide();</script>
<p>
Please activate JavaScript to enable the search functionality.
</p>
</div>
<p>
From here you can search these documents. Enter your search
words into the box below and click "search". Note that the search
function will automatically search for all of the words. Pages
containing fewer words won't appear in the result list.
</p>
<form action="" method="get">
<input type="text" name="q" value="" />
<input type="submit" value="{{ _('search') }}" />
<span id="search-progress" style="padding-left: 10px"></span>
</form>
{% if search_performed %}
<h2>{{ _('Search Results') }}</h2>
{% if not search_results %}
<p>{{ _('Your search did not match any documents. Please make sure that all words are spelled correctly and that you\'ve selected enough categories.') }}</p>
{% endif %}
{% endif %}
<div id="search-results">
{% if search_results %}
<ul>
{% for href, caption, context in search_results %}
<li><a href="{{ pathto(item.href) }}">{{ caption }}</a>
<div class="context">{{ context|e }}</div>
</li>
{% endfor %}
</ul>
{% endif %}
</div>
{% endblock %}

View File

@ -0,0 +1,291 @@
/* SPDX-License-Identifier: CC-BY-SA-4.0 */
html {
background-image: linear-gradient(to bottom right, #4895e1, #56c3ae);
background-size: cover;
background-repeat: no-repeat;
min-height: 100vh;
}
body {
color: rgb(0, 0, 0, 0.65);
font-family: Arial, sans-serif;
margin: 0px;
}
a {
color: unset;
font-weight: bold;
text-decoration: underline dotted;
}
a.headerlink {
color: rgba(0, 0, 0, 0.2);
font-size: 70%;
padding-left: 5px;
visibility: hidden;
}
a.toc-backref {
text-decoration: none;
}
h1:hover a.headerlink,
h2:hover a.headerlink,
h3:hover a.headerlink,
h4:hover a.headerlink,
h5:hover a.headerlink,
h6:hover a.headerlink {
visibility: visible;
}
dt {
font-weight: bold;
}
.text-light {
color: rgba(255, 255, 255, 0.3);
}
div#navbar {
margin-top: 0px;
}
div.navbar-brand {
color: rgb(255, 255, 255, 1.0);
float: left;
font-size: 36px;
margin: 0px 24px 24px 24px;
}
div.navbar-logo {
float: left;
font-family: monospace;
font-size: 18px;
font-weight: bold;
white-space: pre;
}
div.navbar-name {
float: left;
color: rgb(255, 255, 255, 1.0);
font-size: 34px;
margin-top: 31px;
margin-left: 10px;
padding-top: 1px;
}
div.navbar {
float: right;
}
div.navbar p.caption {
height: 0px;
margin: 0px;
visibility: hidden;
}
div.navbar ul {
float: left;
font-size: 24px;
list-style: none;
margin-top: 42px;
margin-right: 20px;
padding-left: 0px;
}
div.navbar a {
font-weight: normal;
text-decoration: none;
}
div.navbar li {
float: left;
margin-left: 20px;
margin-right: 20px;
position: relative;
}
div.navbar li a {
color: rgb(255, 255, 255, 0.5);
position: relative;
}
div.navbar li a:before {
content: "";
position: absolute;
width: 100%;
height: 2px;
bottom: 0;
left: 0;
background-color: rgb(255, 255, 255, 0.5);
visibility: hidden;
transform: scaleX(0);
transition: all 0.3s ease-in-out 0s;
}
div.navbar li a:hover {
color: rgb(255, 255, 255, 1.0);
}
div.navbar li a:hover:before {
visibility: visible;
transform: scaleX(1);
}
div.navbar li.current a {
color: rgb(255, 255, 255, 1.0);
}
div.navbar li.current a:before {
visibility: visible;
transform: unset;
transition: unset;
}
div.navbar div.searchbox {
background-color: white;
float: right;
margin-right: 50px;
margin-top: 42px;
}
div.navbar input[type=text] {
border-width: 0;
height: 2em;
margin-left: 10px;
margin-right: 5px;
}
div.navbar input[type=submit] {
background-color: white;
background-image: url(../search.png);
background-repeat: no-repeat;
border-width: 0;
color: rgba(0, 0, 0, 0);
margin-right: 2px;
width: 20px;
}
div#frontpage {
clear: both;
padding-top: 50px;
margin-left: auto;
margin-right: auto;
width: 75%;
display: flex;
justify-content: space-between;
}
div#frontpage > div.block {
background-color: white;
border-radius: 5px;
box-shadow: 0 4px 16px 0 rgba(0, 0, 0, 0.2), 0 6px 40px 0 rgba(0, 0, 0, 0.19);
color: rgb(0, 0, 0, 0.5);
font-size: 20px;
margin-bottom: 40px;
margin-right: 20px;
margin-left: 20px;
padding: 20px 60px 20px 60px;
text-align: center;
width: 50%;
}
div#frontpage > div.block h1 {
font-size: 64px;
padding-left: 20%;
padding-right: 20%;
text-align: center;
text-shadow: 4px 4px 5px;
}
div#content {
background-color: white;
clear: both;
padding-top: 50px;
padding-bottom: 50px;
margin-left: 0px;
margin-right: 0px;
}
div#content > div.block {
font-size: 16px;
margin-right: 0px;
margin-left: 0px;
max-width: 1280px;
padding: 0px 60px 0px 60px;
text-align: justify;
}
div#content > div.block h1 {
font-size: 40px;
margin-top: 0px;
text-align: left;
}
div#content > div.block > div.section {
max-width: 800px;
}
div.local.topic {
float: right;
background-color: #fcfcff;
border: 1px dotted #4896e0;
margin-left: 20px;
margin-right: 0px;
max-width: 15em;
padding: 10px 20px 10px 10px;
text-align: left;
}
div.local.topic ul {
padding-left: 20px;
margin-bottom: 5px;
}
div.local.topic > ul:before {
content: "Contents";
display: block;
font-weight: bold;
margin-bottom: 10px;
}
div.local.topic a {
font-weight: normal;
padding-left: 10px;
text-decoration: none;
}
div.highlight-shell > div.highlight > pre,
pre.console {
background-color: #fcfcff;
border: 1px dotted #4896e0;
margin-left: 0em;
padding: 10px;
text-align: left;
}
div.highlight-default > div.highlight > pre,
pre.diagram {
background-color: #fcfcff;
border: 1px dotted #4896e0;
font-size: 12px;
margin-left: 0em;
padding: 10px;
text-align: left;
width: 47em;
}
div#signature {
color: rgb(255, 255, 255, 0.5);
margin: 20px;
float: right;
font-size: 12px;
}
/* Collapse the toctree wrapper on the #libcamera page: zero its box and
 * make it invisible. NOTE(review): visibility:hidden + zero size is used
 * instead of display:none — presumably deliberate; confirm whether the
 * wrapper must keep its place in layout/DOM. */
#libcamera div.toctree-wrapper {
height: 0px;
margin: 0px;
padding: 0px;
visibility: hidden;
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 482 B

View File

@ -0,0 +1,7 @@
# SPDX-License-Identifier: CC-BY-SA-4.0
[theme]
inherit = basic
stylesheet = css/theme.css
[options]

View File

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -0,0 +1,22 @@
Copyright (c) <year> <owner>. All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@ -0,0 +1,26 @@
Copyright (c) <year> <owner>. All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@ -0,0 +1,156 @@
Creative Commons Attribution 4.0 International
Creative Commons Corporation (“Creative Commons”) is not a law firm and does not provide legal services or legal advice. Distribution of Creative Commons public licenses does not create a lawyer-client or other relationship. Creative Commons makes its licenses and related information available on an “as-is” basis. Creative Commons gives no warranties regarding its licenses, any material licensed under their terms and conditions, or any related information. Creative Commons disclaims all liability for damages resulting from their use to the fullest extent possible.
Using Creative Commons Public Licenses
Creative Commons public licenses provide a standard set of terms and conditions that creators and other rights holders may use to share original works of authorship and other material subject to copyright and certain other rights specified in the public license below. The following considerations are for informational purposes only, are not exhaustive, and do not form part of our licenses.
Considerations for licensors: Our public licenses are intended for use by those authorized to give the public permission to use material in ways otherwise restricted by copyright and certain other rights. Our licenses are irrevocable. Licensors should read and understand the terms and conditions of the license they choose before applying it. Licensors should also secure all rights necessary before applying our licenses so that the public can reuse the material as expected. Licensors should clearly mark any material not subject to the license. This includes other CC-licensed material, or material used under an exception or limitation to copyright. More considerations for licensors.
Considerations for the public: By using one of our public licenses, a licensor grants the public permission to use the licensed material under specified terms and conditions. If the licensor's permission is not necessary for any reason—for example, because of any applicable exception or limitation to copyright—then that use is not regulated by the license. Our licenses grant only permissions under copyright and certain other rights that a licensor has authority to grant. Use of the licensed material may still be restricted for other reasons, including because others have copyright or other rights in the material. A licensor may make special requests, such as asking that all changes be marked or described. Although not required by our licenses, you are encouraged to respect those requests where reasonable. More considerations for the public.
Creative Commons Attribution 4.0 International Public License
By exercising the Licensed Rights (defined below), You accept and agree to be bound by the terms and conditions of this Creative Commons Attribution 4.0 International Public License ("Public License"). To the extent this Public License may be interpreted as a contract, You are granted the Licensed Rights in consideration of Your acceptance of these terms and conditions, and the Licensor grants You such rights in consideration of benefits the Licensor receives from making the Licensed Material available under these terms and conditions.
Section 1 Definitions.
a. Adapted Material means material subject to Copyright and Similar Rights that is derived from or based upon the Licensed Material and in which the Licensed Material is translated, altered, arranged, transformed, or otherwise modified in a manner requiring permission under the Copyright and Similar Rights held by the Licensor. For purposes of this Public License, where the Licensed Material is a musical work, performance, or sound recording, Adapted Material is always produced where the Licensed Material is synched in timed relation with a moving image.
b. Adapter's License means the license You apply to Your Copyright and Similar Rights in Your contributions to Adapted Material in accordance with the terms and conditions of this Public License.
c. Copyright and Similar Rights means copyright and/or similar rights closely related to copyright including, without limitation, performance, broadcast, sound recording, and Sui Generis Database Rights, without regard to how the rights are labeled or categorized. For purposes of this Public License, the rights specified in Section 2(b)(1)-(2) are not Copyright and Similar Rights.
d. Effective Technological Measures means those measures that, in the absence of proper authority, may not be circumvented under laws fulfilling obligations under Article 11 of the WIPO Copyright Treaty adopted on December 20, 1996, and/or similar international agreements.
e. Exceptions and Limitations means fair use, fair dealing, and/or any other exception or limitation to Copyright and Similar Rights that applies to Your use of the Licensed Material.
f. Licensed Material means the artistic or literary work, database, or other material to which the Licensor applied this Public License.
g. Licensed Rights means the rights granted to You subject to the terms and conditions of this Public License, which are limited to all Copyright and Similar Rights that apply to Your use of the Licensed Material and that the Licensor has authority to license.
h. Licensor means the individual(s) or entity(ies) granting rights under this Public License.
i. Share means to provide material to the public by any means or process that requires permission under the Licensed Rights, such as reproduction, public display, public performance, distribution, dissemination, communication, or importation, and to make material available to the public including in ways that members of the public may access the material from a place and at a time individually chosen by them.
j. Sui Generis Database Rights means rights other than copyright resulting from Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, as amended and/or succeeded, as well as other essentially equivalent rights anywhere in the world.
k. You means the individual or entity exercising the Licensed Rights under this Public License. Your has a corresponding meaning.
Section 2 Scope.
a. License grant.
1. Subject to the terms and conditions of this Public License, the Licensor hereby grants You a worldwide, royalty-free, non-sublicensable, non-exclusive, irrevocable license to exercise the Licensed Rights in the Licensed Material to:
A. reproduce and Share the Licensed Material, in whole or in part; and
B. produce, reproduce, and Share Adapted Material.
2. Exceptions and Limitations. For the avoidance of doubt, where Exceptions and Limitations apply to Your use, this Public License does not apply, and You do not need to comply with its terms and conditions.
3. Term. The term of this Public License is specified in Section 6(a).
4. Media and formats; technical modifications allowed. The Licensor authorizes You to exercise the Licensed Rights in all media and formats whether now known or hereafter created, and to make technical modifications necessary to do so. The Licensor waives and/or agrees not to assert any right or authority to forbid You from making technical modifications necessary to exercise the Licensed Rights, including technical modifications necessary to circumvent Effective Technological Measures. For purposes of this Public License, simply making modifications authorized by this Section 2(a)(4) never produces Adapted Material.
5. Downstream recipients.
A. Offer from the Licensor Licensed Material. Every recipient of the Licensed Material automatically receives an offer from the Licensor to exercise the Licensed Rights under the terms and conditions of this Public License.
B. No downstream restrictions. You may not offer or impose any additional or different terms or conditions on, or apply any Effective Technological Measures to, the Licensed Material if doing so restricts exercise of the Licensed Rights by any recipient of the Licensed Material.
6. No endorsement. Nothing in this Public License constitutes or may be construed as permission to assert or imply that You are, or that Your use of the Licensed Material is, connected with, or sponsored, endorsed, or granted official status by, the Licensor or others designated to receive attribution as provided in Section 3(a)(1)(A)(i).
b. Other rights.
1. Moral rights, such as the right of integrity, are not licensed under this Public License, nor are publicity, privacy, and/or other similar personality rights; however, to the extent possible, the Licensor waives and/or agrees not to assert any such rights held by the Licensor to the limited extent necessary to allow You to exercise the Licensed Rights, but not otherwise.
2. Patent and trademark rights are not licensed under this Public License.
3. To the extent possible, the Licensor waives any right to collect royalties from You for the exercise of the Licensed Rights, whether directly or through a collecting society under any voluntary or waivable statutory or compulsory licensing scheme. In all other cases the Licensor expressly reserves any right to collect such royalties.
Section 3 License Conditions.
Your exercise of the Licensed Rights is expressly made subject to the following conditions.
a. Attribution.
1. If You Share the Licensed Material (including in modified form), You must:
A. retain the following if it is supplied by the Licensor with the Licensed Material:
i. identification of the creator(s) of the Licensed Material and any others designated to receive attribution, in any reasonable manner requested by the Licensor (including by pseudonym if designated);
ii. a copyright notice;
iii. a notice that refers to this Public License;
iv. a notice that refers to the disclaimer of warranties;
v. a URI or hyperlink to the Licensed Material to the extent reasonably practicable;
B. indicate if You modified the Licensed Material and retain an indication of any previous modifications; and
C. indicate the Licensed Material is licensed under this Public License, and include the text of, or the URI or hyperlink to, this Public License.
2. You may satisfy the conditions in Section 3(a)(1) in any reasonable manner based on the medium, means, and context in which You Share the Licensed Material. For example, it may be reasonable to satisfy the conditions by providing a URI or hyperlink to a resource that includes the required information.
3. If requested by the Licensor, You must remove any of the information required by Section 3(a)(1)(A) to the extent reasonably practicable.
4. If You Share Adapted Material You produce, the Adapter's License You apply must not prevent recipients of the Adapted Material from complying with this Public License.
Section 4 Sui Generis Database Rights.
Where the Licensed Rights include Sui Generis Database Rights that apply to Your use of the Licensed Material:
a. for the avoidance of doubt, Section 2(a)(1) grants You the right to extract, reuse, reproduce, and Share all or a substantial portion of the contents of the database;
b. if You include all or a substantial portion of the database contents in a database in which You have Sui Generis Database Rights, then the database in which You have Sui Generis Database Rights (but not its individual contents) is Adapted Material; and
c. You must comply with the conditions in Section 3(a) if You Share all or a substantial portion of the contents of the database.
For the avoidance of doubt, this Section 4 supplements and does not replace Your obligations under this Public License where the Licensed Rights include other Copyright and Similar Rights.
Section 5 Disclaimer of Warranties and Limitation of Liability.
a. Unless otherwise separately undertaken by the Licensor, to the extent possible, the Licensor offers the Licensed Material as-is and as-available, and makes no representations or warranties of any kind concerning the Licensed Material, whether express, implied, statutory, or other. This includes, without limitation, warranties of title, merchantability, fitness for a particular purpose, non-infringement, absence of latent or other defects, accuracy, or the presence or absence of errors, whether or not known or discoverable. Where disclaimers of warranties are not allowed in full or in part, this disclaimer may not apply to You.
b. To the extent possible, in no event will the Licensor be liable to You on any legal theory (including, without limitation, negligence) or otherwise for any direct, special, indirect, incidental, consequential, punitive, exemplary, or other losses, costs, expenses, or damages arising out of this Public License or use of the Licensed Material, even if the Licensor has been advised of the possibility of such losses, costs, expenses, or damages. Where a limitation of liability is not allowed in full or in part, this limitation may not apply to You.
c. The disclaimer of warranties and limitation of liability provided above shall be interpreted in a manner that, to the extent possible, most closely approximates an absolute disclaimer and waiver of all liability.
Section 6 Term and Termination.
a. This Public License applies for the term of the Copyright and Similar Rights licensed here. However, if You fail to comply with this Public License, then Your rights under this Public License terminate automatically.
b. Where Your right to use the Licensed Material has terminated under Section 6(a), it reinstates:
1. automatically as of the date the violation is cured, provided it is cured within 30 days of Your discovery of the violation; or
2. upon express reinstatement by the Licensor.
c. For the avoidance of doubt, this Section 6(b) does not affect any right the Licensor may have to seek remedies for Your violations of this Public License.
d. For the avoidance of doubt, the Licensor may also offer the Licensed Material under separate terms or conditions or stop distributing the Licensed Material at any time; however, doing so will not terminate this Public License.
e. Sections 1, 5, 6, 7, and 8 survive termination of this Public License.
Section 7 Other Terms and Conditions.
a. The Licensor shall not be bound by any additional or different terms or conditions communicated by You unless expressly agreed.
b. Any arrangements, understandings, or agreements regarding the Licensed Material not stated herein are separate from and independent of the terms and conditions of this Public License.
Section 8 Interpretation.
a. For the avoidance of doubt, this Public License does not, and shall not be interpreted to, reduce, limit, restrict, or impose conditions on any use of the Licensed Material that could lawfully be made without permission under this Public License.
b. To the extent possible, if any provision of this Public License is deemed unenforceable, it shall be automatically reformed to the minimum extent necessary to make it enforceable. If the provision cannot be reformed, it shall be severed from this Public License without affecting the enforceability of the remaining terms and conditions.
c. No term or condition of this Public License will be waived and no failure to comply consented to unless expressly agreed to by the Licensor.
d. Nothing in this Public License constitutes or may be interpreted as a limitation upon, or waiver of, any privileges and immunities that apply to the Licensor or You, including from the legal processes of any jurisdiction or authority.
Creative Commons is not a party to its public licenses. Notwithstanding, Creative Commons may elect to apply one of its public licenses to material it publishes and in those instances will be considered the “Licensor.” Except for the limited purpose of indicating that material is shared under a Creative Commons public license or as otherwise permitted by the Creative Commons policies published at creativecommons.org/policies, Creative Commons does not authorize the use of the trademark “Creative Commons” or any other trademark or logo of Creative Commons without its prior written consent including, without limitation, in connection with any unauthorized modifications to any of its public licenses or any other arrangements, understandings, or agreements concerning use of licensed material. For the avoidance of doubt, this paragraph does not form part of the public licenses.
Creative Commons may be contacted at creativecommons.org.

View File

@ -0,0 +1,428 @@
Attribution-ShareAlike 4.0 International
=======================================================================
Creative Commons Corporation ("Creative Commons") is not a law firm and
does not provide legal services or legal advice. Distribution of
Creative Commons public licenses does not create a lawyer-client or
other relationship. Creative Commons makes its licenses and related
information available on an "as-is" basis. Creative Commons gives no
warranties regarding its licenses, any material licensed under their
terms and conditions, or any related information. Creative Commons
disclaims all liability for damages resulting from their use to the
fullest extent possible.
Using Creative Commons Public Licenses
Creative Commons public licenses provide a standard set of terms and
conditions that creators and other rights holders may use to share
original works of authorship and other material subject to copyright
and certain other rights specified in the public license below. The
following considerations are for informational purposes only, are not
exhaustive, and do not form part of our licenses.
Considerations for licensors: Our public licenses are
intended for use by those authorized to give the public
permission to use material in ways otherwise restricted by
copyright and certain other rights. Our licenses are
irrevocable. Licensors should read and understand the terms
and conditions of the license they choose before applying it.
Licensors should also secure all rights necessary before
applying our licenses so that the public can reuse the
material as expected. Licensors should clearly mark any
material not subject to the license. This includes other CC-
licensed material, or material used under an exception or
limitation to copyright. More considerations for licensors:
wiki.creativecommons.org/Considerations_for_licensors
Considerations for the public: By using one of our public
licenses, a licensor grants the public permission to use the
licensed material under specified terms and conditions. If
the licensor's permission is not necessary for any reason--for
example, because of any applicable exception or limitation to
copyright--then that use is not regulated by the license. Our
licenses grant only permissions under copyright and certain
other rights that a licensor has authority to grant. Use of
the licensed material may still be restricted for other
reasons, including because others have copyright or other
rights in the material. A licensor may make special requests,
such as asking that all changes be marked or described.
Although not required by our licenses, you are encouraged to
respect those requests where reasonable. More considerations
for the public:
wiki.creativecommons.org/Considerations_for_licensees
=======================================================================
Creative Commons Attribution-ShareAlike 4.0 International Public
License
By exercising the Licensed Rights (defined below), You accept and agree
to be bound by the terms and conditions of this Creative Commons
Attribution-ShareAlike 4.0 International Public License ("Public
License"). To the extent this Public License may be interpreted as a
contract, You are granted the Licensed Rights in consideration of Your
acceptance of these terms and conditions, and the Licensor grants You
such rights in consideration of benefits the Licensor receives from
making the Licensed Material available under these terms and
conditions.
Section 1 -- Definitions.
a. Adapted Material means material subject to Copyright and Similar
Rights that is derived from or based upon the Licensed Material
and in which the Licensed Material is translated, altered,
arranged, transformed, or otherwise modified in a manner requiring
permission under the Copyright and Similar Rights held by the
Licensor. For purposes of this Public License, where the Licensed
Material is a musical work, performance, or sound recording,
Adapted Material is always produced where the Licensed Material is
synched in timed relation with a moving image.
b. Adapter's License means the license You apply to Your Copyright
and Similar Rights in Your contributions to Adapted Material in
accordance with the terms and conditions of this Public License.
c. BY-SA Compatible License means a license listed at
creativecommons.org/compatiblelicenses, approved by Creative
Commons as essentially the equivalent of this Public License.
d. Copyright and Similar Rights means copyright and/or similar rights
closely related to copyright including, without limitation,
performance, broadcast, sound recording, and Sui Generis Database
Rights, without regard to how the rights are labeled or
categorized. For purposes of this Public License, the rights
specified in Section 2(b)(1)-(2) are not Copyright and Similar
Rights.
e. Effective Technological Measures means those measures that, in the
absence of proper authority, may not be circumvented under laws
fulfilling obligations under Article 11 of the WIPO Copyright
Treaty adopted on December 20, 1996, and/or similar international
agreements.
f. Exceptions and Limitations means fair use, fair dealing, and/or
any other exception or limitation to Copyright and Similar Rights
that applies to Your use of the Licensed Material.
g. License Elements means the license attributes listed in the name
of a Creative Commons Public License. The License Elements of this
Public License are Attribution and ShareAlike.
h. Licensed Material means the artistic or literary work, database,
or other material to which the Licensor applied this Public
License.
i. Licensed Rights means the rights granted to You subject to the
terms and conditions of this Public License, which are limited to
all Copyright and Similar Rights that apply to Your use of the
Licensed Material and that the Licensor has authority to license.
j. Licensor means the individual(s) or entity(ies) granting rights
under this Public License.
k. Share means to provide material to the public by any means or
process that requires permission under the Licensed Rights, such
as reproduction, public display, public performance, distribution,
dissemination, communication, or importation, and to make material
available to the public including in ways that members of the
public may access the material from a place and at a time
individually chosen by them.
l. Sui Generis Database Rights means rights other than copyright
resulting from Directive 96/9/EC of the European Parliament and of
the Council of 11 March 1996 on the legal protection of databases,
as amended and/or succeeded, as well as other essentially
equivalent rights anywhere in the world.
m. You means the individual or entity exercising the Licensed Rights
under this Public License. Your has a corresponding meaning.
Section 2 -- Scope.
a. License grant.
1. Subject to the terms and conditions of this Public License,
the Licensor hereby grants You a worldwide, royalty-free,
non-sublicensable, non-exclusive, irrevocable license to
exercise the Licensed Rights in the Licensed Material to:
a. reproduce and Share the Licensed Material, in whole or
in part; and
b. produce, reproduce, and Share Adapted Material.
2. Exceptions and Limitations. For the avoidance of doubt, where
Exceptions and Limitations apply to Your use, this Public
License does not apply, and You do not need to comply with
its terms and conditions.
3. Term. The term of this Public License is specified in Section
6(a).
4. Media and formats; technical modifications allowed. The
Licensor authorizes You to exercise the Licensed Rights in
all media and formats whether now known or hereafter created,
and to make technical modifications necessary to do so. The
Licensor waives and/or agrees not to assert any right or
authority to forbid You from making technical modifications
necessary to exercise the Licensed Rights, including
technical modifications necessary to circumvent Effective
Technological Measures. For purposes of this Public License,
simply making modifications authorized by this Section 2(a)
(4) never produces Adapted Material.
5. Downstream recipients.
a. Offer from the Licensor -- Licensed Material. Every
recipient of the Licensed Material automatically
receives an offer from the Licensor to exercise the
Licensed Rights under the terms and conditions of this
Public License.
b. Additional offer from the Licensor -- Adapted Material.
Every recipient of Adapted Material from You
automatically receives an offer from the Licensor to
exercise the Licensed Rights in the Adapted Material
under the conditions of the Adapter's License You apply.
c. No downstream restrictions. You may not offer or impose
any additional or different terms or conditions on, or
apply any Effective Technological Measures to, the
Licensed Material if doing so restricts exercise of the
Licensed Rights by any recipient of the Licensed
Material.
6. No endorsement. Nothing in this Public License constitutes or
may be construed as permission to assert or imply that You
are, or that Your use of the Licensed Material is, connected
with, or sponsored, endorsed, or granted official status by,
the Licensor or others designated to receive attribution as
provided in Section 3(a)(1)(A)(i).
b. Other rights.
1. Moral rights, such as the right of integrity, are not
licensed under this Public License, nor are publicity,
privacy, and/or other similar personality rights; however, to
the extent possible, the Licensor waives and/or agrees not to
assert any such rights held by the Licensor to the limited
extent necessary to allow You to exercise the Licensed
Rights, but not otherwise.
2. Patent and trademark rights are not licensed under this
Public License.
3. To the extent possible, the Licensor waives any right to
collect royalties from You for the exercise of the Licensed
Rights, whether directly or through a collecting society
under any voluntary or waivable statutory or compulsory
licensing scheme. In all other cases the Licensor expressly
reserves any right to collect such royalties.
Section 3 -- License Conditions.
Your exercise of the Licensed Rights is expressly made subject to the
following conditions.
a. Attribution.
1. If You Share the Licensed Material (including in modified
form), You must:
a. retain the following if it is supplied by the Licensor
with the Licensed Material:
i. identification of the creator(s) of the Licensed
Material and any others designated to receive
attribution, in any reasonable manner requested by
the Licensor (including by pseudonym if
designated);
ii. a copyright notice;
iii. a notice that refers to this Public License;
iv. a notice that refers to the disclaimer of
warranties;
v. a URI or hyperlink to the Licensed Material to the
extent reasonably practicable;
b. indicate if You modified the Licensed Material and
retain an indication of any previous modifications; and
c. indicate the Licensed Material is licensed under this
Public License, and include the text of, or the URI or
hyperlink to, this Public License.
2. You may satisfy the conditions in Section 3(a)(1) in any
reasonable manner based on the medium, means, and context in
which You Share the Licensed Material. For example, it may be
reasonable to satisfy the conditions by providing a URI or
hyperlink to a resource that includes the required
information.
3. If requested by the Licensor, You must remove any of the
information required by Section 3(a)(1)(A) to the extent
reasonably practicable.
b. ShareAlike.
In addition to the conditions in Section 3(a), if You Share
Adapted Material You produce, the following conditions also apply.
1. The Adapter's License You apply must be a Creative Commons
license with the same License Elements, this version or
later, or a BY-SA Compatible License.
2. You must include the text of, or the URI or hyperlink to, the
Adapter's License You apply. You may satisfy this condition
in any reasonable manner based on the medium, means, and
context in which You Share Adapted Material.
3. You may not offer or impose any additional or different terms
or conditions on, or apply any Effective Technological
Measures to, Adapted Material that restrict exercise of the
rights granted under the Adapter's License You apply.
Section 4 -- Sui Generis Database Rights.
Where the Licensed Rights include Sui Generis Database Rights that
apply to Your use of the Licensed Material:
a. for the avoidance of doubt, Section 2(a)(1) grants You the right
to extract, reuse, reproduce, and Share all or a substantial
portion of the contents of the database;
b. if You include all or a substantial portion of the database
contents in a database in which You have Sui Generis Database
Rights, then the database in which You have Sui Generis Database
Rights (but not its individual contents) is Adapted Material,
including for purposes of Section 3(b); and
c. You must comply with the conditions in Section 3(a) if You Share
all or a substantial portion of the contents of the database.
For the avoidance of doubt, this Section 4 supplements and does not
replace Your obligations under this Public License where the Licensed
Rights include other Copyright and Similar Rights.
Section 5 -- Disclaimer of Warranties and Limitation of Liability.
a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE
EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS
AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF
ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS,
IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION,
WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR
PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS,
ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT
KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT
ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU.
b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE
TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION,
NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT,
INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES,
COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR
USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN
ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR
DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR
IN PART, THIS LIMITATION MAY NOT APPLY TO YOU.
c. The disclaimer of warranties and limitation of liability provided
above shall be interpreted in a manner that, to the extent
possible, most closely approximates an absolute disclaimer and
waiver of all liability.
Section 6 -- Term and Termination.
a. This Public License applies for the term of the Copyright and
Similar Rights licensed here. However, if You fail to comply with
this Public License, then Your rights under this Public License
terminate automatically.
b. Where Your right to use the Licensed Material has terminated under
Section 6(a), it reinstates:
1. automatically as of the date the violation is cured, provided
it is cured within 30 days of Your discovery of the
violation; or
2. upon express reinstatement by the Licensor.
For the avoidance of doubt, this Section 6(b) does not affect any
right the Licensor may have to seek remedies for Your violations
of this Public License.
c. For the avoidance of doubt, the Licensor may also offer the
Licensed Material under separate terms or conditions or stop
distributing the Licensed Material at any time; however, doing so
will not terminate this Public License.
d. Sections 1, 5, 6, 7, and 8 survive termination of this Public
License.
Section 7 -- Other Terms and Conditions.
a. The Licensor shall not be bound by any additional or different
terms or conditions communicated by You unless expressly agreed.
b. Any arrangements, understandings, or agreements regarding the
Licensed Material not stated herein are separate from and
independent of the terms and conditions of this Public License.
Section 8 -- Interpretation.
a. For the avoidance of doubt, this Public License does not, and
shall not be interpreted to, reduce, limit, restrict, or impose
conditions on any use of the Licensed Material that could lawfully
be made without permission under this Public License.
b. To the extent possible, if any provision of this Public License is
deemed unenforceable, it shall be automatically reformed to the
minimum extent necessary to make it enforceable. If the provision
cannot be reformed, it shall be severed from this Public License
without affecting the enforceability of the remaining terms and
conditions.
c. No term or condition of this Public License will be waived and no
failure to comply consented to unless expressly agreed to by the
Licensor.
d. Nothing in this Public License constitutes or may be interpreted
as a limitation upon, or waiver of, any privileges and immunities
that apply to the Licensor or You, including from the legal
processes of any jurisdiction or authority.
=======================================================================
Creative Commons is not a party to its public
licenses. Notwithstanding, Creative Commons may elect to apply one of
its public licenses to material it publishes and in those instances
will be considered the “Licensor.” The text of the Creative Commons
public licenses is dedicated to the public domain under the CC0 Public
Domain Dedication. Except for the limited purpose of indicating that
material is shared under a Creative Commons public license or as
otherwise permitted by the Creative Commons policies published at
creativecommons.org/policies, Creative Commons does not authorize the
use of the trademark "Creative Commons" or any other trademark or logo
of Creative Commons without its prior written consent including,
without limitation, in connection with any unauthorized modifications
to any of its public licenses or any other arrangements,
understandings, or agreements concerning use of licensed material. For
the avoidance of doubt, this paragraph does not form part of the
public licenses.
Creative Commons may be contacted at creativecommons.org.

View File

@ -0,0 +1,119 @@
Creative Commons Legal Code
CC0 1.0 Universal CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES
NOT PROVIDE LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE
AN ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS INFORMATION
ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES REGARDING THE USE
OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED HEREUNDER, AND DISCLAIMS
LIABILITY FOR DAMAGES RESULTING FROM THE USE OF THIS DOCUMENT OR THE INFORMATION
OR WORKS PROVIDED HEREUNDER.
Statement of Purpose
The laws of most jurisdictions throughout the world automatically confer exclusive
Copyright and Related Rights (defined below) upon the creator and subsequent
owner(s) (each and all, an "owner") of an original work of authorship and/or
a database (each, a "Work").
Certain owners wish to permanently relinquish those rights to a Work for the
purpose of contributing to a commons of creative, cultural and scientific
works ("Commons") that the public can reliably and without fear of later claims
of infringement build upon, modify, incorporate in other works, reuse and
redistribute as freely as possible in any form whatsoever and for any purposes,
including without limitation commercial purposes. These owners may contribute
to the Commons to promote the ideal of a free culture and the further production
of creative, cultural and scientific works, or to gain reputation or greater
distribution for their Work in part through the use and efforts of others.
For these and/or other purposes and motivations, and without any expectation
of additional consideration or compensation, the person associating CC0 with
a Work (the "Affirmer"), to the extent that he or she is an owner of Copyright
and Related Rights in the Work, voluntarily elects to apply CC0 to the Work
and publicly distribute the Work under its terms, with knowledge of his or
her Copyright and Related Rights in the Work and the meaning and intended
legal effect of CC0 on those rights.
1. Copyright and Related Rights. A Work made available under CC0 may be protected
by copyright and related or neighboring rights ("Copyright and Related Rights").
Copyright and Related Rights include, but are not limited to, the following:
i. the right to reproduce, adapt, distribute, perform, display, communicate,
and translate a Work;
ii. moral rights retained by the original author(s) and/or performer(s);
iii. publicity and privacy rights pertaining to a person's image or likeness
depicted in a Work;
iv. rights protecting against unfair competition in regards to a Work, subject
to the limitations in paragraph 4(a), below;
v. rights protecting the extraction, dissemination, use and reuse of data
in a Work;
vi. database rights (such as those arising under Directive 96/9/EC of the
European Parliament and of the Council of 11 March 1996 on the legal protection
of databases, and under any national implementation thereof, including any
amended or successor version of such directive); and
vii. other similar, equivalent or corresponding rights throughout the world
based on applicable law or treaty, and any national implementations thereof.
2. Waiver. To the greatest extent permitted by, but not in contravention of,
applicable law, Affirmer hereby overtly, fully, permanently, irrevocably and
unconditionally waives, abandons, and surrenders all of Affirmer's Copyright
and Related Rights and associated claims and causes of action, whether now
known or unknown (including existing as well as future claims and causes of
action), in the Work (i) in all territories worldwide, (ii) for the maximum
duration provided by applicable law or treaty (including future time extensions),
(iii) in any current or future medium and for any number of copies, and (iv)
for any purpose whatsoever, including without limitation commercial, advertising
or promotional purposes (the "Waiver"). Affirmer makes the Waiver for the
benefit of each member of the public at large and to the detriment of Affirmer's
heirs and successors, fully intending that such Waiver shall not be subject
to revocation, rescission, cancellation, termination, or any other legal or
equitable action to disrupt the quiet enjoyment of the Work by the public
as contemplated by Affirmer's express Statement of Purpose.
3. Public License Fallback. Should any part of the Waiver for any reason be
judged legally invalid or ineffective under applicable law, then the Waiver
shall be preserved to the maximum extent permitted taking into account Affirmer's
express Statement of Purpose. In addition, to the extent the Waiver is so
judged Affirmer hereby grants to each affected person a royalty-free, non
transferable, non sublicensable, non exclusive, irrevocable and unconditional
license to exercise Affirmer's Copyright and Related Rights in the Work (i)
in all territories worldwide, (ii) for the maximum duration provided by applicable
law or treaty (including future time extensions), (iii) in any current or
future medium and for any number of copies, and (iv) for any purpose whatsoever,
including without limitation commercial, advertising or promotional purposes
(the "License"). The License shall be deemed effective as of the date CC0
was applied by Affirmer to the Work. Should any part of the License for any
reason be judged legally invalid or ineffective under applicable law, such
partial invalidity or ineffectiveness shall not invalidate the remainder of
the License, and in such case Affirmer hereby affirms that he or she will
not (i) exercise any of his or her remaining Copyright and Related Rights
in the Work or (ii) assert any associated claims and causes of action with
respect to the Work, in either case contrary to Affirmer's express Statement
of Purpose.
4. Limitations and Disclaimers.
a. No trademark or patent rights held by Affirmer are waived, abandoned, surrendered,
licensed or otherwise affected by this document.
b. Affirmer offers the Work as-is and makes no representations or warranties
of any kind concerning the Work, express, implied, statutory or otherwise,
including without limitation warranties of title, merchantability, fitness
for a particular purpose, non infringement, or the absence of latent or other
defects, accuracy, or the present or absence of errors, whether or not discoverable,
all to the greatest extent permissible under applicable law.
c. Affirmer disclaims responsibility for clearing rights of other persons
that may apply to the Work or any use thereof, including without limitation
any person's Copyright and Related Rights in the Work. Further, Affirmer disclaims
responsibility for obtaining any necessary consents, permissions or other
rights required for any use of the Work.
d. Affirmer understands and acknowledges that Creative Commons is not a party
to this document and has no duty or obligation with respect to this CC0 or
use of the Work.

View File

@ -0,0 +1 @@
GPL-2.0-or-later.txt

View File

@ -0,0 +1,339 @@
GNU GENERAL PUBLIC LICENSE
Version 2, June 1991
Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The licenses for most software are designed to take away your
freedom to share and change it. By contrast, the GNU General Public
License is intended to guarantee your freedom to share and change free
software--to make sure the software is free for all its users. This
General Public License applies to most of the Free Software
Foundation's software and to any other program whose authors commit to
using it. (Some other Free Software Foundation software is covered by
the GNU Lesser General Public License instead.) You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
this service if you wish), that you receive source code or can get it
if you want it, that you can change the software or use pieces of it
in new free programs; and that you know you can do these things.
To protect your rights, we need to make restrictions that forbid
anyone to deny you these rights or to ask you to surrender the rights.
These restrictions translate to certain responsibilities for you if you
distribute copies of the software, or if you modify it.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must give the recipients all the rights that
you have. You must make sure that they, too, receive or can get the
source code. And you must show them these terms so they know their
rights.
We protect your rights with two steps: (1) copyright the software, and
(2) offer you this license which gives you legal permission to copy,
distribute and/or modify the software.
Also, for each author's protection and ours, we want to make certain
that everyone understands that there is no warranty for this free
software. If the software is modified by someone else and passed on, we
want its recipients to know that what they have is not the original, so
that any problems introduced by others will not reflect on the original
authors' reputations.
Finally, any free program is threatened constantly by software
patents. We wish to avoid the danger that redistributors of a free
program will individually obtain patent licenses, in effect making the
program proprietary. To prevent this, we have made it clear that any
patent must be licensed for everyone's free use or not licensed at all.
The precise terms and conditions for copying, distribution and
modification follow.
GNU GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. This License applies to any program or other work which contains
a notice placed by the copyright holder saying it may be distributed
under the terms of this General Public License. The "Program", below,
refers to any such program or work, and a "work based on the Program"
means either the Program or any derivative work under copyright law:
that is to say, a work containing the Program or a portion of it,
either verbatim or with modifications and/or translated into another
language. (Hereinafter, translation is included without limitation in
the term "modification".) Each licensee is addressed as "you".
Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope. The act of
running the Program is not restricted, and the output from the Program
is covered only if its contents constitute a work based on the
Program (independent of having been made by running the Program).
Whether that is true depends on what the Program does.
1. You may copy and distribute verbatim copies of the Program's
source code as you receive it, in any medium, provided that you
conspicuously and appropriately publish on each copy an appropriate
copyright notice and disclaimer of warranty; keep intact all the
notices that refer to this License and to the absence of any warranty;
and give any other recipients of the Program a copy of this License
along with the Program.
You may charge a fee for the physical act of transferring a copy, and
you may at your option offer warranty protection in exchange for a fee.
2. You may modify your copy or copies of the Program or any portion
of it, thus forming a work based on the Program, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:
a) You must cause the modified files to carry prominent notices
stating that you changed the files and the date of any change.
b) You must cause any work that you distribute or publish, that in
whole or in part contains or is derived from the Program or any
part thereof, to be licensed as a whole at no charge to all third
parties under the terms of this License.
c) If the modified program normally reads commands interactively
when run, you must cause it, when started running for such
interactive use in the most ordinary way, to print or display an
announcement including an appropriate copyright notice and a
notice that there is no warranty (or else, saying that you provide
a warranty) and that users may redistribute the program under
these conditions, and telling the user how to view a copy of this
License. (Exception: if the Program itself is interactive but
does not normally print such an announcement, your work based on
the Program is not required to print an announcement.)
These requirements apply to the modified work as a whole. If
identifiable sections of that work are not derived from the Program,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works. But when you
distribute the same sections as part of a whole which is a work based
on the Program, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote it.
Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Program.
In addition, mere aggregation of another work not based on the Program
with the Program (or with a work based on the Program) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.
3. You may copy and distribute the Program (or a work based on it,
under Section 2) in object code or executable form under the terms of
Sections 1 and 2 above provided that you also do one of the following:
a) Accompany it with the complete corresponding machine-readable
source code, which must be distributed under the terms of Sections
1 and 2 above on a medium customarily used for software interchange; or,
b) Accompany it with a written offer, valid for at least three
years, to give any third party, for a charge no more than your
cost of physically performing source distribution, a complete
machine-readable copy of the corresponding source code, to be
distributed under the terms of Sections 1 and 2 above on a medium
customarily used for software interchange; or,
c) Accompany it with the information you received as to the offer
to distribute corresponding source code. (This alternative is
allowed only for noncommercial distribution and only if you
received the program in object code or executable form with such
an offer, in accord with Subsection b above.)
The source code for a work means the preferred form of the work for
making modifications to it. For an executable work, complete source
code means all the source code for all modules it contains, plus any
associated interface definition files, plus the scripts used to
control compilation and installation of the executable. However, as a
special exception, the source code distributed need not include
anything that is normally distributed (in either source or binary
form) with the major components (compiler, kernel, and so on) of the
operating system on which the executable runs, unless that component
itself accompanies the executable.
If distribution of executable or object code is made by offering
access to copy from a designated place, then offering equivalent
access to copy the source code from the same place counts as
distribution of the source code, even though third parties are not
compelled to copy the source along with the object code.
4. You may not copy, modify, sublicense, or distribute the Program
except as expressly provided under this License. Any attempt
otherwise to copy, modify, sublicense or distribute the Program is
void, and will automatically terminate your rights under this License.
However, parties who have received copies, or rights, from you under
this License will not have their licenses terminated so long as such
parties remain in full compliance.
5. You are not required to accept this License, since you have not
signed it. However, nothing else grants you permission to modify or
distribute the Program or its derivative works. These actions are
prohibited by law if you do not accept this License. Therefore, by
modifying or distributing the Program (or any work based on the
Program), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Program or works based on it.
6. Each time you redistribute the Program (or any work based on the
Program), the recipient automatically receives a license from the
original licensor to copy, distribute or modify the Program subject to
these terms and conditions. You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties to
this License.
7. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot
distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you
may not distribute the Program at all. For example, if a patent
license would not permit royalty-free redistribution of the Program by
all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Program.
If any portion of this section is held invalid or unenforceable under
any particular circumstance, the balance of the section is intended to
apply and the section as a whole is intended to apply in other
circumstances.
It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system, which is
implemented by public license practices. Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.
This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.
8. If the distribution and/or use of the Program is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Program under this License
may add an explicit geographical distribution limitation excluding
those countries, so that distribution is permitted only in or among
countries not thus excluded. In such case, this License incorporates
the limitation as if written in the body of this License.
9. The Free Software Foundation may publish revised and/or new versions
of the General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the Program
specifies a version number of this License which applies to it and "any
later version", you have the option of following the terms and conditions
either of that version or of any later version published by the Free
Software Foundation. If the Program does not specify a version number of
this License, you may choose any version ever published by the Free Software
Foundation.
10. If you wish to incorporate parts of the Program into other free
programs whose distribution conditions are different, write to the author
to ask for permission. For software which is copyrighted by the Free
Software Foundation, write to the Free Software Foundation; we sometimes
make exceptions for this. Our decision will be guided by the two goals
of preserving the free status of all derivatives of our free software and
of promoting the sharing and reuse of software generally.
NO WARRANTY
11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
REPAIR OR CORRECTION.
12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGES.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
convey the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
Also add information on how to contact you by electronic and paper mail.
If the program is interactive, make it output a short notice like this
when it starts in an interactive mode:
Gnomovision version 69, Copyright (C) year name of author
Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, the commands you use may
be called something other than `show w' and `show c'; they could even be
mouse-clicks or menu items--whatever suits your program.
You should also get your employer (if you work as a programmer) or your
school, if any, to sign a "copyright disclaimer" for the program, if
necessary. Here is a sample; alter the names:
Yoyodyne, Inc., hereby disclaims all copyright interest in the program
`Gnomovision' (which makes passes at compilers) written by James Hacker.
<signature of Ty Coon>, 1 April 1989
Ty Coon, President of Vice
This General Public License does not permit incorporating your program into
proprietary programs. If your program is a subroutine library, you may
consider it more useful to permit linking proprietary applications with the
library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License.

View File

@ -0,0 +1,319 @@
GNU GENERAL PUBLIC LICENSE
Version 2, June 1991
Copyright (C) 1989, 1991 Free Software Foundation, Inc.
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
Everyone is permitted to copy and distribute verbatim copies of this license
document, but changing it is not allowed.
Preamble
The licenses for most software are designed to take away your freedom to share
and change it. By contrast, the GNU General Public License is intended to
guarantee your freedom to share and change free software--to make sure the
software is free for all its users. This General Public License applies to
most of the Free Software Foundation's software and to any other program whose
authors commit to using it. (Some other Free Software Foundation software
is covered by the GNU Lesser General Public License instead.) You can apply
it to your programs, too.
When we speak of free software, we are referring to freedom, not price. Our
General Public Licenses are designed to make sure that you have the freedom
to distribute copies of free software (and charge for this service if you
wish), that you receive source code or can get it if you want it, that you
can change the software or use pieces of it in new free programs; and that
you know you can do these things.
To protect your rights, we need to make restrictions that forbid anyone to
deny you these rights or to ask you to surrender the rights. These restrictions
translate to certain responsibilities for you if you distribute copies of
the software, or if you modify it.
For example, if you distribute copies of such a program, whether gratis or
for a fee, you must give the recipients all the rights that you have. You
must make sure that they, too, receive or can get the source code. And you
must show them these terms so they know their rights.
We protect your rights with two steps: (1) copyright the software, and (2)
offer you this license which gives you legal permission to copy, distribute
and/or modify the software.
Also, for each author's protection and ours, we want to make certain that
everyone understands that there is no warranty for this free software. If
the software is modified by someone else and passed on, we want its recipients
to know that what they have is not the original, so that any problems introduced
by others will not reflect on the original authors' reputations.
Finally, any free program is threatened constantly by software patents. We
wish to avoid the danger that redistributors of a free program will individually
obtain patent licenses, in effect making the program proprietary. To prevent
this, we have made it clear that any patent must be licensed for everyone's
free use or not licensed at all.
The precise terms and conditions for copying, distribution and modification
follow.
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. This License applies to any program or other work which contains a notice
placed by the copyright holder saying it may be distributed under the terms
of this General Public License. The "Program", below, refers to any such program
or work, and a "work based on the Program" means either the Program or any
derivative work under copyright law: that is to say, a work containing the
Program or a portion of it, either verbatim or with modifications and/or translated
into another language. (Hereinafter, translation is included without limitation
in the term "modification".) Each licensee is addressed as "you".
Activities other than copying, distribution and modification are not covered
by this License; they are outside its scope. The act of running the Program
is not restricted, and the output from the Program is covered only if its
contents constitute a work based on the Program (independent of having been
made by running the Program). Whether that is true depends on what the Program
does.
1. You may copy and distribute verbatim copies of the Program's source code
as you receive it, in any medium, provided that you conspicuously and appropriately
publish on each copy an appropriate copyright notice and disclaimer of warranty;
keep intact all the notices that refer to this License and to the absence
of any warranty; and give any other recipients of the Program a copy of this
License along with the Program.
You may charge a fee for the physical act of transferring a copy, and you
may at your option offer warranty protection in exchange for a fee.
2. You may modify your copy or copies of the Program or any portion of it,
thus forming a work based on the Program, and copy and distribute such modifications
or work under the terms of Section 1 above, provided that you also meet all
of these conditions:
a) You must cause the modified files to carry prominent notices stating that
you changed the files and the date of any change.
b) You must cause any work that you distribute or publish, that in whole or
in part contains or is derived from the Program or any part thereof, to be
licensed as a whole at no charge to all third parties under the terms of this
License.
c) If the modified program normally reads commands interactively when run,
you must cause it, when started running for such interactive use in the most
ordinary way, to print or display an announcement including an appropriate
copyright notice and a notice that there is no warranty (or else, saying that
you provide a warranty) and that users may redistribute the program under
these conditions, and telling the user how to view a copy of this License.
(Exception: if the Program itself is interactive but does not normally print
such an announcement, your work based on the Program is not required to print
an announcement.)
These requirements apply to the modified work as a whole. If identifiable
sections of that work are not derived from the Program, and can be reasonably
considered independent and separate works in themselves, then this License,
and its terms, do not apply to those sections when you distribute them as
separate works. But when you distribute the same sections as part of a whole
which is a work based on the Program, the distribution of the whole must be
on the terms of this License, whose permissions for other licensees extend
to the entire whole, and thus to each and every part regardless of who wrote
it.
Thus, it is not the intent of this section to claim rights or contest your
rights to work written entirely by you; rather, the intent is to exercise
the right to control the distribution of derivative or collective works based
on the Program.
In addition, mere aggregation of another work not based on the Program with
the Program (or with a work based on the Program) on a volume of a storage
or distribution medium does not bring the other work under the scope of this
License.
3. You may copy and distribute the Program (or a work based on it, under Section
2) in object code or executable form under the terms of Sections 1 and 2 above
provided that you also do one of the following:
a) Accompany it with the complete corresponding machine-readable source code,
which must be distributed under the terms of Sections 1 and 2 above on a medium
customarily used for software interchange; or,
b) Accompany it with a written offer, valid for at least three years, to give
any third party, for a charge no more than your cost of physically performing
source distribution, a complete machine-readable copy of the corresponding
source code, to be distributed under the terms of Sections 1 and 2 above on
a medium customarily used for software interchange; or,
c) Accompany it with the information you received as to the offer to distribute
corresponding source code. (This alternative is allowed only for noncommercial
distribution and only if you received the program in object code or executable
form with such an offer, in accord with Subsection b above.)
The source code for a work means the preferred form of the work for making
modifications to it. For an executable work, complete source code means all
the source code for all modules it contains, plus any associated interface
definition files, plus the scripts used to control compilation and installation
of the executable. However, as a special exception, the source code distributed
need not include anything that is normally distributed (in either source or
binary form) with the major components (compiler, kernel, and so on) of the
operating system on which the executable runs, unless that component itself
accompanies the executable.
If distribution of executable or object code is made by offering access to
copy from a designated place, then offering equivalent access to copy the
source code from the same place counts as distribution of the source code,
even though third parties are not compelled to copy the source along with
the object code.
4. You may not copy, modify, sublicense, or distribute the Program except
as expressly provided under this License. Any attempt otherwise to copy, modify,
sublicense or distribute the Program is void, and will automatically terminate
your rights under this License. However, parties who have received copies,
or rights, from you under this License will not have their licenses terminated
so long as such parties remain in full compliance.
5. You are not required to accept this License, since you have not signed
it. However, nothing else grants you permission to modify or distribute the
Program or its derivative works. These actions are prohibited by law if you
do not accept this License. Therefore, by modifying or distributing the Program
(or any work based on the Program), you indicate your acceptance of this License
to do so, and all its terms and conditions for copying, distributing or modifying
the Program or works based on it.
6. Each time you redistribute the Program (or any work based on the Program),
the recipient automatically receives a license from the original licensor
to copy, distribute or modify the Program subject to these terms and conditions.
You may not impose any further restrictions on the recipients' exercise of
the rights granted herein. You are not responsible for enforcing compliance
by third parties to this License.
7. If, as a consequence of a court judgment or allegation of patent infringement
or for any other reason (not limited to patent issues), conditions are imposed
on you (whether by court order, agreement or otherwise) that contradict the
conditions of this License, they do not excuse you from the conditions of
this License. If you cannot distribute so as to satisfy simultaneously your
obligations under this License and any other pertinent obligations, then as
a consequence you may not distribute the Program at all. For example, if a
patent license would not permit royalty-free redistribution of the Program
by all those who receive copies directly or indirectly through you, then the
only way you could satisfy both it and this License would be to refrain entirely
from distribution of the Program.
If any portion of this section is held invalid or unenforceable under any
particular circumstance, the balance of the section is intended to apply and
the section as a whole is intended to apply in other circumstances.
It is not the purpose of this section to induce you to infringe any patents
or other property right claims or to contest validity of any such claims;
this section has the sole purpose of protecting the integrity of the free
software distribution system, which is implemented by public license practices.
Many people have made generous contributions to the wide range of software
distributed through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing to
distribute software through any other system and a licensee cannot impose
that choice.
This section is intended to make thoroughly clear what is believed to be a
consequence of the rest of this License.
8. If the distribution and/or use of the Program is restricted in certain
countries either by patents or by copyrighted interfaces, the original copyright
holder who places the Program under this License may add an explicit geographical
distribution limitation excluding those countries, so that distribution is
permitted only in or among countries not thus excluded. In such case, this
License incorporates the limitation as if written in the body of this License.
9. The Free Software Foundation may publish revised and/or new versions of
the General Public License from time to time. Such new versions will be similar
in spirit to the present version, but may differ in detail to address new
problems or concerns.
Each version is given a distinguishing version number. If the Program specifies
a version number of this License which applies to it and "any later version",
you have the option of following the terms and conditions either of that version
or of any later version published by the Free Software Foundation. If the
Program does not specify a version number of this License, you may choose
any version ever published by the Free Software Foundation.
10. If you wish to incorporate parts of the Program into other free programs
whose distribution conditions are different, write to the author to ask for
permission. For software which is copyrighted by the Free Software Foundation,
write to the Free Software Foundation; we sometimes make exceptions for this.
Our decision will be guided by the two goals of preserving the free status
of all derivatives of our free software and of promoting the sharing and reuse
of software generally.
NO WARRANTY
11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR
THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE
STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM
"AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING,
BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE
OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME
THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE
OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA
OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES
OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH
HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest possible
use to the public, the best way to achieve this is to make it free software
which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest to attach
them to the start of each source file to most effectively convey the exclusion
of warranty; and each file should have at least the "copyright" line and a
pointer to where the full notice is found.
<one line to give the program's name and an idea of what it does.>
Copyright (C) <yyyy> <name of author>
This program is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
this program; if not, write to the Free Software Foundation, Inc., 51 Franklin
Street, Fifth Floor, Boston, MA 02110-1301, USA.
Also add information on how to contact you by electronic and paper mail.
If the program is interactive, make it output a short notice like this when
it starts in an interactive mode:
Gnomovision version 69, Copyright (C) year name of author Gnomovision comes
with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software,
and you are welcome to redistribute it under certain conditions; type `show
c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, the commands you use may be
called something other than `show w' and `show c'; they could even be mouse-clicks
or menu items--whatever suits your program.
You should also get your employer (if you work as a programmer) or your school,
if any, to sign a "copyright disclaimer" for the program, if necessary. Here
is a sample; alter the names:
Yoyodyne, Inc., hereby disclaims all copyright interest in the program `Gnomovision'
(which makes passes at compilers) written by James Hacker.
<signature of Ty Coon>, 1 April 1989 Ty Coon, President of Vice This General
Public License does not permit incorporating your program into proprietary
programs. If your program is a subroutine library, you may consider it more
useful to permit linking proprietary applications with the library. If this
is what you want to do, use the GNU Lesser General Public License instead
of this License.

View File

@ -0,0 +1 @@
GPL-2.0-only.txt

View File

@ -0,0 +1,468 @@
GNU LESSER GENERAL PUBLIC LICENSE
Version 2.1, February 1999
Copyright (C) 1991, 1999 Free Software Foundation, Inc.
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Everyone is permitted to copy and distribute verbatim copies of this license
document, but changing it is not allowed.
[This is the first released version of the Lesser GPL. It also counts as the
successor of the GNU Library Public License, version 2, hence the version
number 2.1.]
Preamble
The licenses for most software are designed to take away your freedom to share
and change it. By contrast, the GNU General Public Licenses are intended to
guarantee your freedom to share and change free software--to make sure the
software is free for all its users.
This license, the Lesser General Public License, applies to some specially
designated software packages--typically libraries--of the Free Software Foundation
and other authors who decide to use it. You can use it too, but we suggest
you first think carefully about whether this license or the ordinary General
Public License is the better strategy to use in any particular case, based
on the explanations below.
When we speak of free software, we are referring to freedom of use, not price.
Our General Public Licenses are designed to make sure that you have the freedom
to distribute copies of free software (and charge for this service if you
wish); that you receive source code or can get it if you want it; that you
can change the software and use pieces of it in new free programs; and that
you are informed that you can do these things.
To protect your rights, we need to make restrictions that forbid distributors
to deny you these rights or to ask you to surrender these rights. These restrictions
translate to certain responsibilities for you if you distribute copies of
the library or if you modify it.
For example, if you distribute copies of the library, whether gratis or for
a fee, you must give the recipients all the rights that we gave you. You must
make sure that they, too, receive or can get the source code. If you link
other code with the library, you must provide complete object files to the
recipients, so that they can relink them with the library after making changes
to the library and recompiling it. And you must show them these terms so they
know their rights.
We protect your rights with a two-step method: (1) we copyright the library,
and (2) we offer you this license, which gives you legal permission to copy,
distribute and/or modify the library.
To protect each distributor, we want to make it very clear that there is no
warranty for the free library. Also, if the library is modified by someone
else and passed on, the recipients should know that what they have is not
the original version, so that the original author's reputation will not be
affected by problems that might be introduced by others.
Finally, software patents pose a constant threat to the existence of any free
program. We wish to make sure that a company cannot effectively restrict the
users of a free program by obtaining a restrictive license from a patent holder.
Therefore, we insist that any patent license obtained for a version of the
library must be consistent with the full freedom of use specified in this
license.
Most GNU software, including some libraries, is covered by the ordinary GNU
General Public License. This license, the GNU Lesser General Public License,
applies to certain designated libraries, and is quite different from the ordinary
General Public License. We use this license for certain libraries in order
to permit linking those libraries into non-free programs.
When a program is linked with a library, whether statically or using a shared
library, the combination of the two is legally speaking a combined work, a
derivative of the original library. The ordinary General Public License therefore
permits such linking only if the entire combination fits its criteria of freedom.
The Lesser General Public License permits more lax criteria for linking other
code with the library.
We call this license the "Lesser" General Public License because it does Less
to protect the user's freedom than the ordinary General Public License. It
also provides other free software developers Less of an advantage over competing
non-free programs. These disadvantages are the reason we use the ordinary
General Public License for many libraries. However, the Lesser license provides
advantages in certain special circumstances.
For example, on rare occasions, there may be a special need to encourage the
widest possible use of a certain library, so that it becomes a de-facto standard.
To achieve this, non-free programs must be allowed to use the library. A more
frequent case is that a free library does the same job as widely used non-free
libraries. In this case, there is little to gain by limiting the free library
to free software only, so we use the Lesser General Public License.
In other cases, permission to use a particular library in non-free programs
enables a greater number of people to use a large body of free software. For
example, permission to use the GNU C Library in non-free programs enables
many more people to use the whole GNU operating system, as well as its variant,
the GNU/Linux operating system.
Although the Lesser General Public License is Less protective of the users'
freedom, it does ensure that the user of a program that is linked with the
Library has the freedom and the wherewithal to run that program using a modified
version of the Library.
The precise terms and conditions for copying, distribution and modification
follow. Pay close attention to the difference between a "work based on the
library" and a "work that uses the library". The former contains code derived
from the library, whereas the latter must be combined with the library in
order to run.
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. This License Agreement applies to any software library or other program
which contains a notice placed by the copyright holder or other authorized
party saying it may be distributed under the terms of this Lesser General
Public License (also called "this License"). Each licensee is addressed as
"you".
A "library" means a collection of software functions and/or data prepared
so as to be conveniently linked with application programs (which use some
of those functions and data) to form executables.
The "Library", below, refers to any such software library or work which has
been distributed under these terms. A "work based on the Library" means either
the Library or any derivative work under copyright law: that is to say, a
work containing the Library or a portion of it, either verbatim or with modifications
and/or translated straightforwardly into another language. (Hereinafter, translation
is included without limitation in the term "modification".)
"Source code" for a work means the preferred form of the work for making modifications
to it. For a library, complete source code means all the source code for all
modules it contains, plus any associated interface definition files, plus
the scripts used to control compilation and installation of the library.
Activities other than copying, distribution and modification are not covered
by this License; they are outside its scope. The act of running a program
using the Library is not restricted, and output from such a program is covered
only if its contents constitute a work based on the Library (independent of
the use of the Library in a tool for writing it). Whether that is true depends
on what the Library does and what the program that uses the Library does.
1. You may copy and distribute verbatim copies of the Library's complete source
code as you receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice and disclaimer
of warranty; keep intact all the notices that refer to this License and to
the absence of any warranty; and distribute a copy of this License along with
the Library.
You may charge a fee for the physical act of transferring a copy, and you
may at your option offer warranty protection in exchange for a fee.
2. You may modify your copy or copies of the Library or any portion of it,
thus forming a work based on the Library, and copy and distribute such modifications
or work under the terms of Section 1 above, provided that you also meet all
of these conditions:
a) The modified work must itself be a software library.
b) You must cause the files modified to carry prominent notices stating that
you changed the files and the date of any change.
c) You must cause the whole of the work to be licensed at no charge to all
third parties under the terms of this License.
d) If a facility in the modified Library refers to a function or a table of
data to be supplied by an application program that uses the facility, other
than as an argument passed when the facility is invoked, then you must make
a good faith effort to ensure that, in the event an application does not supply
such function or table, the facility still operates, and performs whatever
part of its purpose remains meaningful.
(For example, a function in a library to compute square roots has a purpose
that is entirely well-defined independent of the application. Therefore, Subsection
2d requires that any application-supplied function or table used by this function
must be optional: if the application does not supply it, the square root function
must still compute square roots.)
These requirements apply to the modified work as a whole. If identifiable
sections of that work are not derived from the Library, and can be reasonably
considered independent and separate works in themselves, then this License,
and its terms, do not apply to those sections when you distribute them as
separate works. But when you distribute the same sections as part of a whole
which is a work based on the Library, the distribution of the whole must be
on the terms of this License, whose permissions for other licensees extend
to the entire whole, and thus to each and every part regardless of who wrote
it.
Thus, it is not the intent of this section to claim rights or contest your
rights to work written entirely by you; rather, the intent is to exercise
the right to control the distribution of derivative or collective works based
on the Library.
In addition, mere aggregation of another work not based on the Library with
the Library (or with a work based on the Library) on a volume of a storage
or distribution medium does not bring the other work under the scope of this
License.
3. You may opt to apply the terms of the ordinary GNU General Public License
instead of this License to a given copy of the Library. To do this, you must
alter all the notices that refer to this License, so that they refer to the
ordinary GNU General Public License, version 2, instead of to this License.
(If a newer version than version 2 of the ordinary GNU General Public License
has appeared, then you can specify that version instead if you wish.) Do not
make any other change in these notices.
Once this change is made in a given copy, it is irreversible for that copy,
so the ordinary GNU General Public License applies to all subsequent copies
and derivative works made from that copy.
This option is useful when you wish to copy part of the code of the Library
into a program that is not a library.
4. You may copy and distribute the Library (or a portion or derivative of
it, under Section 2) in object code or executable form under the terms of
Sections 1 and 2 above provided that you accompany it with the complete corresponding
machine-readable source code, which must be distributed under the terms of
Sections 1 and 2 above on a medium customarily used for software interchange.
If distribution of object code is made by offering access to copy from a designated
place, then offering equivalent access to copy the source code from the same
place satisfies the requirement to distribute the source code, even though
third parties are not compelled to copy the source along with the object code.
5. A program that contains no derivative of any portion of the Library, but
is designed to work with the Library by being compiled or linked with it,
is called a "work that uses the Library". Such a work, in isolation, is not
a derivative work of the Library, and therefore falls outside the scope of
this License.
However, linking a "work that uses the Library" with the Library creates an
executable that is a derivative of the Library (because it contains portions
of the Library), rather than a "work that uses the library". The executable
is therefore covered by this License. Section 6 states terms for distribution
of such executables.
When a "work that uses the Library" uses material from a header file that
is part of the Library, the object code for the work may be a derivative work
of the Library even though the source code is not. Whether this is true is
especially significant if the work can be linked without the Library, or if
the work is itself a library. The threshold for this to be true is not precisely
defined by law.
If such an object file uses only numerical parameters, data structure layouts
and accessors, and small macros and small inline functions (ten lines or less
in length), then the use of the object file is unrestricted, regardless of
whether it is legally a derivative work. (Executables containing this object
code plus portions of the Library will still fall under Section 6.)
Otherwise, if the work is a derivative of the Library, you may distribute
the object code for the work under the terms of Section 6. Any executables
containing that work also fall under Section 6, whether or not they are linked
directly with the Library itself.
6. As an exception to the Sections above, you may also combine or link a "work
that uses the Library" with the Library to produce a work containing portions
of the Library, and distribute that work under terms of your choice, provided
that the terms permit modification of the work for the customer's own use
and reverse engineering for debugging such modifications.
You must give prominent notice with each copy of the work that the Library
is used in it and that the Library and its use are covered by this License.
You must supply a copy of this License. If the work during execution displays
copyright notices, you must include the copyright notice for the Library among
them, as well as a reference directing the user to the copy of this License.
Also, you must do one of these things:
a) Accompany the work with the complete corresponding machine-readable source
code for the Library including whatever changes were used in the work (which
must be distributed under Sections 1 and 2 above); and, if the work is an
executable linked with the Library, with the complete machine-readable "work
that uses the Library", as object code and/or source code, so that the user
can modify the Library and then relink to produce a modified executable containing
the modified Library. (It is understood that the user who changes the contents
of definitions files in the Library will not necessarily be able to recompile
the application to use the modified definitions.)
b) Use a suitable shared library mechanism for linking with the Library. A
suitable mechanism is one that (1) uses at run time a copy of the library
already present on the user's computer system, rather than copying library
functions into the executable, and (2) will operate properly with a modified
version of the library, if the user installs one, as long as the modified
version is interface-compatible with the version that the work was made with.
c) Accompany the work with a written offer, valid for at least three years,
to give the same user the materials specified in Subsection 6a, above, for
a charge no more than the cost of performing this distribution.
d) If distribution of the work is made by offering access to copy from a designated
place, offer equivalent access to copy the above specified materials from
the same place.
e) Verify that the user has already received a copy of these materials or
that you have already sent this user a copy.
For an executable, the required form of the "work that uses the Library" must
include any data and utility programs needed for reproducing the executable
from it. However, as a special exception, the materials to be distributed
need not include anything that is normally distributed (in either source or
binary form) with the major components (compiler, kernel, and so on) of the
operating system on which the executable runs, unless that component itself
accompanies the executable.
It may happen that this requirement contradicts the license restrictions of
other proprietary libraries that do not normally accompany the operating system.
Such a contradiction means you cannot use both them and the Library together
in an executable that you distribute.
7. You may place library facilities that are a work based on the Library side-by-side
in a single library together with other library facilities not covered by
this License, and distribute such a combined library, provided that the separate
distribution of the work based on the Library and of the other library facilities
is otherwise permitted, and provided that you do these two things:
a) Accompany the combined library with a copy of the same work based on the
Library, uncombined with any other library facilities. This must be distributed
under the terms of the Sections above.
b) Give prominent notice with the combined library of the fact that part of
it is a work based on the Library, and explaining where to find the accompanying
uncombined form of the same work.
8. You may not copy, modify, sublicense, link with, or distribute the Library
except as expressly provided under this License. Any attempt otherwise to
copy, modify, sublicense, link with, or distribute the Library is void, and
will automatically terminate your rights under this License. However, parties
who have received copies, or rights, from you under this License will not
have their licenses terminated so long as such parties remain in full compliance.
9. You are not required to accept this License, since you have not signed
it. However, nothing else grants you permission to modify or distribute the
Library or its derivative works. These actions are prohibited by law if you
do not accept this License. Therefore, by modifying or distributing the Library
(or any work based on the Library), you indicate your acceptance of this License
to do so, and all its terms and conditions for copying, distributing or modifying
the Library or works based on it.
10. Each time you redistribute the Library (or any work based on the Library),
the recipient automatically receives a license from the original licensor
to copy, distribute, link with or modify the Library subject to these terms
and conditions. You may not impose any further restrictions on the recipients'
exercise of the rights granted herein. You are not responsible for enforcing
compliance by third parties with this License.
11. If, as a consequence of a court judgment or allegation of patent infringement
or for any other reason (not limited to patent issues), conditions are imposed
on you (whether by court order, agreement or otherwise) that contradict the
conditions of this License, they do not excuse you from the conditions of
this License. If you cannot distribute so as to satisfy simultaneously your
obligations under this License and any other pertinent obligations, then as
a consequence you may not distribute the Library at all. For example, if a
patent license would not permit royalty-free redistribution of the Library
by all those who receive copies directly or indirectly through you, then the
only way you could satisfy both it and this License would be to refrain entirely
from distribution of the Library.
If any portion of this section is held invalid or unenforceable under any
particular circumstance, the balance of the section is intended to apply,
and the section as a whole is intended to apply in other circumstances.
It is not the purpose of this section to induce you to infringe any patents
or other property right claims or to contest validity of any such claims;
this section has the sole purpose of protecting the integrity of the free
software distribution system which is implemented by public license practices.
Many people have made generous contributions to the wide range of software
distributed through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing to
distribute software through any other system and a licensee cannot impose
that choice.
This section is intended to make thoroughly clear what is believed to be a
consequence of the rest of this License.
12. If the distribution and/or use of the Library is restricted in certain
countries either by patents or by copyrighted interfaces, the original copyright
holder who places the Library under this License may add an explicit geographical
distribution limitation excluding those countries, so that distribution is
permitted only in or among countries not thus excluded. In such case, this
License incorporates the limitation as if written in the body of this License.
13. The Free Software Foundation may publish revised and/or new versions of
the Lesser General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to address
new problems or concerns.
Each version is given a distinguishing version number. If the Library specifies
a version number of this License which applies to it and "any later version",
you have the option of following the terms and conditions either of that version
or of any later version published by the Free Software Foundation. If the
Library does not specify a license version number, you may choose any version
ever published by the Free Software Foundation.
14. If you wish to incorporate parts of the Library into other free programs
whose distribution conditions are incompatible with these, write to the author
to ask for permission. For software which is copyrighted by the Free Software
Foundation, write to the Free Software Foundation; we sometimes make exceptions
for this. Our decision will be guided by the two goals of preserving the free
status of all derivatives of our free software and of promoting the sharing
and reuse of software generally.
NO WARRANTY
15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR
THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE
STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY
"AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING,
BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE
OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE
THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE
OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA
OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES
OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH
HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Libraries
If you develop a new library, and you want it to be of the greatest possible
use to the public, we recommend making it free software that everyone can
redistribute and change. You can do so by permitting redistribution under
these terms (or, alternatively, under the terms of the ordinary General Public
License).
To apply these terms, attach the following notices to the library. It is safest
to attach them to the start of each source file to most effectively convey
the exclusion of warranty; and each file should have at least the "copyright"
line and a pointer to where the full notice is found.
<one line to give the library's name and an idea of what it does.>
Copyright (C) <year> <name of author>
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the Free
Software Foundation; either version 2.1 of the License, or (at your option)
any later version.
This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
details.
You should have received a copy of the GNU Lesser General Public License along
with this library; if not, write to the Free Software Foundation, Inc., 51
Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Also add information on how to contact you by electronic and paper mail.
You should also get your employer (if you work as a programmer) or your school,
if any, to sign a "copyright disclaimer" for the library, if necessary. Here
is a sample; alter the names:
Yoyodyne, Inc., hereby disclaims all copyright interest in
the library `Frob' (a library for tweaking knobs) written
by James Random Hacker.
< signature of Ty Coon > , 1 April 1990
Ty Coon, President of Vice
That's all there is to it!

View File

@ -0,0 +1,5 @@
NOTE! This copyright does *not* cover user programs that use kernel services by normal system calls - this is merely considered normal use of the kernel, and does *not* fall under the heading of "derived work". Also note that the GPL below is copyrighted by the Free Software Foundation, but the instance of code that it refers to (the Linux kernel) is copyrighted by me and others who actually wrote it.
Also note that the only valid version of the GPL as far as the kernel is concerned is _this_ particular version of the license (ie v2, not v2.2 or v3.x or whatever), unless explicitly otherwise stated.
Linus Torvalds

View File

@ -0,0 +1,19 @@
MIT License Copyright (c) <year> <copyright holders>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is furnished
to do so, subject to the following conditions:
The above copyright notice and this permission notice (including the next
paragraph) shall be included in all copies or substantial portions of the
Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF
OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@ -0,0 +1,202 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
.. section-begin-libcamera
===========
libcamera
===========
**A complex camera support library for Linux, Android, and ChromeOS**
Cameras are complex devices that need heavy hardware image processing
operations. Control of the processing is based on advanced algorithms that must
run on a programmable processor. This has traditionally been implemented in a
dedicated MCU in the camera, but in embedded devices algorithms have been moved
to the main CPU to save cost. Blurring the boundary between camera devices and
Linux often left the user with no other option than a vendor-specific
closed-source solution.
To address this problem the Linux media community has very recently started
collaboration with the industry to develop a camera stack that will be
open-source-friendly while still protecting vendor core IP. libcamera was born
out of that collaboration and will offer modern camera support to Linux-based
systems, including traditional Linux distributions, ChromeOS and Android.
.. section-end-libcamera
.. section-begin-getting-started
Getting Started
---------------
To fetch the sources, build and install:
.. code::
git clone https://git.libcamera.org/libcamera/libcamera.git
cd libcamera
meson setup build
ninja -C build install
Dependencies
~~~~~~~~~~~~
The following Debian/Ubuntu packages are required for building libcamera.
Other distributions may have differing package names:
A C++ toolchain: [required]
Either {g++, clang}
Meson Build system: [required]
meson (>= 0.60) ninja-build pkg-config
for the libcamera core: [required]
libyaml-dev python3-yaml python3-ply python3-jinja2
for IPA module signing: [recommended]
Either libgnutls28-dev or libssl-dev, openssl
Without IPA module signing, all IPA modules will be isolated in a
separate process. This adds an unnecessary extra overhead at runtime.
for improved debugging: [optional]
libdw-dev libunwind-dev
libdw and libunwind provide backtraces to help debugging assertion
failures. Their functions overlap, libdw provides the most detailed
information, and libunwind is not needed if both libdw and the glibc
backtrace() function are available.
for device hotplug enumeration: [optional]
libudev-dev
for documentation: [optional]
python3-sphinx doxygen graphviz texlive-latex-extra
for gstreamer: [optional]
libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev
for Python bindings: [optional]
libpython3-dev pybind11-dev
for cam: [optional]
libevent-dev is required to support cam, however the following
optional dependencies bring more functionality to the cam test
tool:
- libdrm-dev: Enables the KMS sink
- libjpeg-dev: Enables MJPEG on the SDL sink
- libsdl2-dev: Enables the SDL sink
for qcam: [optional]
libtiff-dev qtbase5-dev qttools5-dev-tools
for tracing with lttng: [optional]
liblttng-ust-dev python3-jinja2 lttng-tools
for android: [optional]
libexif-dev libjpeg-dev
for Python bindings: [optional]
pybind11-dev
for lc-compliance: [optional]
libevent-dev libgtest-dev
for abi-compat.sh: [optional]
abi-compliance-checker
Basic testing with cam utility
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The ``cam`` utility can be used for basic testing. You can list the cameras
detected on the system with ``cam -l``, and capture ten frames from the first
camera and save them to disk with ``cam -c 1 --capture=10 --file``. See
``cam -h`` for more information about the ``cam`` tool.
In case of problems, a detailed debug log can be obtained from libcamera by
setting the ``LIBCAMERA_LOG_LEVELS`` environment variable:
.. code::
:~$ LIBCAMERA_LOG_LEVELS=*:DEBUG cam -l
Using GStreamer plugin
~~~~~~~~~~~~~~~~~~~~~~
To use the GStreamer plugin from the source tree, use the meson ``devenv``
command. This will create a new shell instance with the ``GST_PLUGIN_PATH``
environment set accordingly.
.. code::
meson devenv -C build
The debugging tool ``gst-launch-1.0`` can be used to construct a pipeline and
test it. The following pipeline will stream from the camera named "Camera 1"
onto the OpenGL accelerated display element on your system.
.. code::
gst-launch-1.0 libcamerasrc camera-name="Camera 1" ! queue ! glimagesink
To show the first camera found you can omit the camera-name property, or you
can list the cameras and their capabilities using:
.. code::
gst-device-monitor-1.0 Video
This will also show the supported stream sizes which can be manually selected
if desired with a pipeline such as:
.. code::
gst-launch-1.0 libcamerasrc ! 'video/x-raw,width=1280,height=720' ! \
queue ! glimagesink
The libcamerasrc element has two log categories, named libcamera-provider (for
the video device provider) and libcamerasrc (for the operation of the camera).
All corresponding debug messages can be enabled by setting the ``GST_DEBUG``
environment variable to ``libcamera*:7``.
Presently, to prevent element negotiation failures it is required to specify
the colorimetry and framerate as part of your pipeline construction. For
instance, to capture and encode as a JPEG stream and receive on another device
the following example could be used as a starting point:
.. code::
gst-launch-1.0 libcamerasrc ! \
video/x-raw,colorimetry=bt709,format=NV12,width=1280,height=720,framerate=30/1 ! \
queue ! jpegenc ! multipartmux ! \
tcpserversink host=0.0.0.0 port=5000
Which can be received on another device over the network with:
.. code::
gst-launch-1.0 tcpclientsrc host=$DEVICE_IP port=5000 ! \
multipartdemux ! jpegdec ! autovideosink
.. section-end-getting-started
Troubleshooting
~~~~~~~~~~~~~~~
Several users have reported issues with meson installation, crux of the issue
is a potential version mismatch between the version that root uses, and the
version that the normal user uses. On calling `ninja -C build`, it can't find
the build.ninja module. This is a snippet of the error message.
::
ninja: Entering directory `build'
ninja: error: loading 'build.ninja': No such file or directory
This can be solved in two ways:
1. Don't install meson again if it is already installed system-wide.
2. If a version of meson which is different from the system-wide version is
already installed, uninstall that meson using pip3, and install again without
the --user argument.

View File

@ -0,0 +1,919 @@
/* SPDX-License-Identifier: Apache-2.0 */
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// FIXME: add well-defined names for cameras
#ifndef ANDROID_INCLUDE_CAMERA_COMMON_H
#define ANDROID_INCLUDE_CAMERA_COMMON_H
#include <stdint.h>
#include <stdbool.h>
#include <sys/cdefs.h>
#include <sys/types.h>
#include <cutils/native_handle.h>
#include <system/camera.h>
#include <system/camera_vendor_tags.h>
#include <hardware/hardware.h>
#include <hardware/gralloc.h>
__BEGIN_DECLS
/**
 * The id of this module.
 * Used by the HAL loader to look up this hardware module by name.
 */
#define CAMERA_HARDWARE_MODULE_ID "camera"
/**
* Module versioning information for the Camera hardware module, based on
* camera_module_t.common.module_api_version. The two most significant hex
* digits represent the major version, and the two least significant represent
* the minor version.
*
*******************************************************************************
* Versions: 0.X - 1.X [CAMERA_MODULE_API_VERSION_1_0]
*
* Camera modules that report these version numbers implement the initial
* camera module HAL interface. All camera devices openable through this
* module support only version 1 of the camera device HAL. The device_version
* and static_camera_characteristics fields of camera_info are not valid. Only
* the android.hardware.Camera API can be supported by this module and its
* devices.
*
*******************************************************************************
* Version: 2.0 [CAMERA_MODULE_API_VERSION_2_0]
*
* Camera modules that report this version number implement the second version
* of the camera module HAL interface. Camera devices openable through this
* module may support either version 1.0 or version 2.0 of the camera device
* HAL interface. The device_version field of camera_info is always valid; the
* static_camera_characteristics field of camera_info is valid if the
* device_version field is 2.0 or higher.
*
*******************************************************************************
* Version: 2.1 [CAMERA_MODULE_API_VERSION_2_1]
*
* This camera module version adds support for asynchronous callbacks to the
* framework from the camera HAL module, which is used to notify the framework
* about changes to the camera module state. Modules that provide a valid
* set_callbacks() method must report at least this version number.
*
*******************************************************************************
* Version: 2.2 [CAMERA_MODULE_API_VERSION_2_2]
*
* This camera module version adds vendor tag support from the module, and
* deprecates the old vendor_tag_query_ops that were previously only
* accessible with a device open.
*
*******************************************************************************
* Version: 2.3 [CAMERA_MODULE_API_VERSION_2_3]
*
* This camera module version adds open legacy camera HAL device support.
* Framework can use it to open the camera device as lower device HAL version
* HAL device if the same device can support multiple device API versions.
* The standard hardware module open call (common.methods->open) continues
* to open the camera device with the latest supported version, which is
* also the version listed in camera_info_t.device_version.
*
*******************************************************************************
* Version: 2.4 [CAMERA_MODULE_API_VERSION_2_4]
*
* This camera module version adds below API changes:
*
* 1. Torch mode support. The framework can use it to turn on torch mode for
* any camera device that has a flash unit, without opening a camera device. The
* camera device has a higher priority accessing the flash unit than the camera
* module; opening a camera device will turn off the torch if it had been enabled
* through the module interface. When there are any resource conflicts, such as
* open() is called to open a camera device, the camera HAL module must notify the
* framework through the torch mode status callback that the torch mode has been
* turned off.
*
* 2. External camera (e.g. USB hot-plug camera) support. The API updates specify that
* the camera static info is only available when camera is connected and ready to
* use for external hot-plug cameras. Calls to get static info will be invalid
* calls when camera status is not CAMERA_DEVICE_STATUS_PRESENT. The frameworks
* will only count on device status change callbacks to manage the available external
* camera list.
*
* 3. Camera arbitration hints. This module version adds support for explicitly
* indicating the number of camera devices that can be simultaneously opened and used.
* To specify valid combinations of devices, the resource_cost and conflicting_devices
* fields should always be set in the camera_info structure returned by the
* get_camera_info call.
*
* 4. Module initialization method. This will be called by the camera service
* right after the HAL module is loaded, to allow for one-time initialization
* of the HAL. It is called before any other module methods are invoked.
*/
/**
 * Predefined macros for currently-defined version numbers
 */
/**
 * All module versions <= HARDWARE_MODULE_API_VERSION(1, 0xFF) must be treated
 * as CAMERA_MODULE_API_VERSION_1_0
 */
/* Module API versions: major version in the high byte, minor in the low byte
 * (encoded by HARDWARE_MODULE_API_VERSION). See the version history comment
 * above for the semantics of each version. */
#define CAMERA_MODULE_API_VERSION_1_0 HARDWARE_MODULE_API_VERSION(1, 0)
#define CAMERA_MODULE_API_VERSION_2_0 HARDWARE_MODULE_API_VERSION(2, 0)
#define CAMERA_MODULE_API_VERSION_2_1 HARDWARE_MODULE_API_VERSION(2, 1)
#define CAMERA_MODULE_API_VERSION_2_2 HARDWARE_MODULE_API_VERSION(2, 2)
#define CAMERA_MODULE_API_VERSION_2_3 HARDWARE_MODULE_API_VERSION(2, 3)
#define CAMERA_MODULE_API_VERSION_2_4 HARDWARE_MODULE_API_VERSION(2, 4)
/* The most recent module API version defined in this header. */
#define CAMERA_MODULE_API_VERSION_CURRENT CAMERA_MODULE_API_VERSION_2_4
/**
* All device versions <= HARDWARE_DEVICE_API_VERSION(1, 0xFF) must be treated
* as CAMERA_DEVICE_API_VERSION_1_0
*/
#define CAMERA_DEVICE_API_VERSION_1_0 HARDWARE_DEVICE_API_VERSION(1, 0) // DEPRECATED
#define CAMERA_DEVICE_API_VERSION_2_0 HARDWARE_DEVICE_API_VERSION(2, 0) // NO LONGER SUPPORTED
#define CAMERA_DEVICE_API_VERSION_2_1 HARDWARE_DEVICE_API_VERSION(2, 1) // NO LONGER SUPPORTED
#define CAMERA_DEVICE_API_VERSION_3_0 HARDWARE_DEVICE_API_VERSION(3, 0) // NO LONGER SUPPORTED
#define CAMERA_DEVICE_API_VERSION_3_1 HARDWARE_DEVICE_API_VERSION(3, 1) // NO LONGER SUPPORTED
#define CAMERA_DEVICE_API_VERSION_3_2 HARDWARE_DEVICE_API_VERSION(3, 2)
#define CAMERA_DEVICE_API_VERSION_3_3 HARDWARE_DEVICE_API_VERSION(3, 3)
#define CAMERA_DEVICE_API_VERSION_3_4 HARDWARE_DEVICE_API_VERSION(3, 4)
#define CAMERA_DEVICE_API_VERSION_3_5 HARDWARE_DEVICE_API_VERSION(3, 5)
// Device version 3.5 is current, older HAL camera device versions are not
// recommended for new devices.
#define CAMERA_DEVICE_API_VERSION_CURRENT CAMERA_DEVICE_API_VERSION_3_5
/**
 * Opaque handle to a camera metadata buffer (forward declaration only).
 * Defined in /system/media/camera/include/system/camera_metadata.h
 */
typedef struct camera_metadata camera_metadata_t;
/**
 * Static (per-device) camera information returned by
 * camera_module_t.get_camera_info().
 */
typedef struct camera_info {
/**
 * The direction that the camera faces to. See system/core/include/system/camera.h
 * for camera facing definitions.
 *
 * Version information (based on camera_module_t.common.module_api_version):
 *
 * CAMERA_MODULE_API_VERSION_2_3 or lower:
 *
 * It should be CAMERA_FACING_BACK or CAMERA_FACING_FRONT.
 *
 * CAMERA_MODULE_API_VERSION_2_4 or higher:
 *
 * It should be CAMERA_FACING_BACK, CAMERA_FACING_FRONT or
 * CAMERA_FACING_EXTERNAL.
 */
int facing;
/**
 * The orientation of the camera image. The value is the angle that the
 * camera image needs to be rotated clockwise so it shows correctly on the
 * display in its natural orientation. It should be 0, 90, 180, or 270.
 *
 * For example, suppose a device has a naturally tall screen. The
 * back-facing camera sensor is mounted in landscape. You are looking at the
 * screen. If the top side of the camera sensor is aligned with the right
 * edge of the screen in natural orientation, the value should be 90. If the
 * top side of a front-facing camera sensor is aligned with the right of the
 * screen, the value should be 270.
 *
 * Version information (based on camera_module_t.common.module_api_version):
 *
 * CAMERA_MODULE_API_VERSION_2_3 or lower:
 *
 * Valid in all camera_module versions.
 *
 * CAMERA_MODULE_API_VERSION_2_4 or higher:
 *
 * Valid if camera facing is CAMERA_FACING_BACK or CAMERA_FACING_FRONT,
 * not valid if camera facing is CAMERA_FACING_EXTERNAL.
 */
int orientation;
/**
 * The value of camera_device_t.common.version.
 *
 * Version information (based on camera_module_t.common.module_api_version):
 *
 * CAMERA_MODULE_API_VERSION_1_0:
 *
 * Not valid. Can be assumed to be CAMERA_DEVICE_API_VERSION_1_0. Do
 * not read this field.
 *
 * CAMERA_MODULE_API_VERSION_2_0 or higher:
 *
 * Always valid
 *
 */
uint32_t device_version;
/**
 * The camera's fixed characteristics, which include all static camera metadata
 * specified in system/media/camera/docs/docs.html. This should be a sorted metadata
 * buffer, and may not be modified or freed by the caller. The pointer should remain
 * valid for the lifetime of the camera module, and values in it may not
 * change after it is returned by get_camera_info().
 *
 * Version information (based on camera_module_t.common.module_api_version):
 *
 * CAMERA_MODULE_API_VERSION_1_0:
 *
 * Not valid. Extra characteristics are not available. Do not read this
 * field.
 *
 * CAMERA_MODULE_API_VERSION_2_0 or higher:
 *
 * Valid if device_version >= CAMERA_DEVICE_API_VERSION_2_0. Do not read
 * otherwise.
 *
 */
const camera_metadata_t *static_camera_characteristics;
/**
 * The total resource "cost" of using this camera, represented as an integer
 * value in the range [0, 100] where 100 represents total usage of the shared
 * resource that is the limiting bottleneck of the camera subsystem. This may
 * be a very rough estimate, and is used as a hint to the camera service to
 * determine when to disallow multiple applications from simultaneously
 * opening different cameras advertised by the camera service.
 *
 * The camera service must be able to simultaneously open and use any
 * combination of camera devices exposed by the HAL where the sum of
 * the resource costs of these cameras is <= 100. For determining cost,
 * each camera device must be assumed to be configured and operating at
 * the maximally resource-consuming framerate and stream size settings
 * available in the configuration settings exposed for that device through
 * the camera metadata.
 *
 * The camera service may still attempt to simultaneously open combinations
 * of camera devices with a total resource cost > 100. This may succeed or
 * fail. If this succeeds, combinations of configurations that are not
 * supported due to resource constraints from having multiple open devices
 * should fail during the configure calls. If the total resource cost is
 * <= 100, open and configure should never fail for any stream configuration
 * settings or other device capabilities that would normally succeed for a
 * device when it is the only open camera device.
 *
 * This field will be used to determine whether background applications are
 * allowed to use this camera device while other applications are using other
 * camera devices. Note: multiple applications will never be allowed by the
 * camera service to simultaneously open the same camera device.
 *
 * Example use cases:
 *
 * Ex. 1: Camera Device 0 = Back Camera
 * Camera Device 1 = Front Camera
 * - Using both camera devices causes a large framerate slowdown due to
 * limited ISP bandwidth.
 *
 * Configuration:
 *
 * Camera Device 0 - resource_cost = 51
 * conflicting_devices = null
 * Camera Device 1 - resource_cost = 51
 * conflicting_devices = null
 *
 * Result:
 *
 * Since the sum of the resource costs is > 100, if a higher-priority
 * application has either device open, no lower-priority applications will be
 * allowed by the camera service to open either device. If a lower-priority
 * application is using a device that a higher-priority subsequently attempts
 * to open, the lower-priority application will be forced to disconnect
 * the device.
 *
 * If the highest-priority application chooses, it may still attempt to open
 * both devices (since these devices are not listed as conflicting in the
 * conflicting_devices fields), but usage of these devices may fail in the
 * open or configure calls.
 *
 * Ex. 2: Camera Device 0 = Left Back Camera
 * Camera Device 1 = Right Back Camera
 * Camera Device 2 = Combined stereo camera using both right and left
 * back camera sensors used by devices 0, and 1
 * Camera Device 3 = Front Camera
 * - Due to hardware constraints, up to two cameras may be open at once. The
 * combined stereo camera may never be used at the same time as either of the
 * two back camera devices (device 0, 1), and typically requires too much
 * bandwidth to use at the same time as the front camera (device 3).
 *
 * Configuration:
 *
 * Camera Device 0 - resource_cost = 50
 * conflicting_devices = { 2 }
 * Camera Device 1 - resource_cost = 50
 * conflicting_devices = { 2 }
 * Camera Device 2 - resource_cost = 100
 * conflicting_devices = { 0, 1 }
 * Camera Device 3 - resource_cost = 50
 * conflicting_devices = null
 *
 * Result:
 *
 * Based on the conflicting_devices fields, the camera service guarantees that
 * the following sets of open devices will never be allowed: { 1, 2 }, { 0, 2 }.
 *
 * Based on the resource_cost fields, if a high-priority foreground application
 * is using camera device 0, a background application would be allowed to open
 * camera device 1 or 3 (but would be forced to disconnect it again if the
 * foreground application opened another device).
 *
 * The highest priority application may still attempt to simultaneously open
 * devices 0, 2, and 3, but the HAL may fail in open or configure calls for
 * this combination.
 *
 * Ex. 3: Camera Device 0 = Back Camera
 * Camera Device 1 = Front Camera
 * Camera Device 2 = Low-power Front Camera that uses the same
 * sensor as device 1, but only exposes image stream
 * resolutions that can be used in low-power mode
 * - Using both front cameras (device 1, 2) at the same time is impossible due
 * a shared physical sensor. Using the back and "high-power" front camera
 * (device 1) may be impossible for some stream configurations due to hardware
 * limitations, but the "low-power" front camera option may always be used as
 * it has special dedicated hardware.
 *
 * Configuration:
 *
 * Camera Device 0 - resource_cost = 100
 * conflicting_devices = null
 * Camera Device 1 - resource_cost = 100
 * conflicting_devices = { 2 }
 * Camera Device 2 - resource_cost = 0
 * conflicting_devices = { 1 }
 * Result:
 *
 * Based on the conflicting_devices fields, the camera service guarantees that
 * the following sets of open devices will never be allowed: { 1, 2 }.
 *
 * Based on the resource_cost fields, only the highest priority application
 * may attempt to open both device 0 and 1 at the same time. If a higher-priority
 * application is not using device 1 or 2, a low-priority background application
 * may open device 2 (but will be forced to disconnect it if a higher-priority
 * application subsequently opens device 1 or 2).
 *
 * Version information (based on camera_module_t.common.module_api_version):
 *
 * CAMERA_MODULE_API_VERSION_2_3 or lower:
 *
 * Not valid. Can be assumed to be 100. Do not read this field.
 *
 * CAMERA_MODULE_API_VERSION_2_4 or higher:
 *
 * Always valid.
 */
int resource_cost;
/**
 * An array of camera device IDs represented as NULL-terminated strings
 * indicating other devices that cannot be simultaneously opened while this
 * camera device is in use.
 *
 * This field is intended to be used to indicate that this camera device
 * is a composite of several other camera devices, or otherwise has
 * hardware dependencies that prohibit simultaneous usage. If there are no
 * dependencies, a NULL may be returned in this field to indicate this.
 *
 * The camera service will never simultaneously open any of the devices
 * in this list while this camera device is open.
 *
 * The strings pointed to in this field will not be cleaned up by the camera
 * service, and must remain while this device is plugged in.
 *
 * Version information (based on camera_module_t.common.module_api_version):
 *
 * CAMERA_MODULE_API_VERSION_2_3 or lower:
 *
 * Not valid. Can be assumed to be NULL. Do not read this field.
 *
 * CAMERA_MODULE_API_VERSION_2_4 or higher:
 *
 * Always valid.
 */
char** conflicting_devices;
/**
 * The length of the array given in the conflicting_devices field.
 *
 * Version information (based on camera_module_t.common.module_api_version):
 *
 * CAMERA_MODULE_API_VERSION_2_3 or lower:
 *
 * Not valid. Can be assumed to be 0. Do not read this field.
 *
 * CAMERA_MODULE_API_VERSION_2_4 or higher:
 *
 * Always valid.
 */
size_t conflicting_devices_length;
} camera_info_t;
/**
 * camera_device_status_t:
 *
 * The current status of the camera device, as provided by the HAL through the
 * camera_module_callbacks.camera_device_status_change() call.
 *
 * At module load time, the framework will assume all camera devices are in the
 * CAMERA_DEVICE_STATUS_PRESENT state. The HAL should invoke
 * camera_module_callbacks::camera_device_status_change to inform the framework
 * of any initially NOT_PRESENT devices.
 *
 * Allowed transitions:
 * PRESENT -> NOT_PRESENT
 * NOT_PRESENT -> ENUMERATING
 * NOT_PRESENT -> PRESENT
 * ENUMERATING -> PRESENT
 * ENUMERATING -> NOT_PRESENT
 */
typedef enum camera_device_status {
/**
 * The camera device is not currently connected, and opening it will return
 * failure.
 *
 * Version information (based on camera_module_t.common.module_api_version):
 *
 * CAMERA_MODULE_API_VERSION_2_3 or lower:
 *
 * Calls to get_camera_info must still succeed, and provide the same information
 * it would if the camera were connected.
 *
 * CAMERA_MODULE_API_VERSION_2_4:
 *
 * The camera device at this status must return -EINVAL for the get_camera_info
 * call, as the device is not connected.
 */
CAMERA_DEVICE_STATUS_NOT_PRESENT = 0,
/**
 * The camera device is connected, and opening it will succeed.
 *
 * CAMERA_MODULE_API_VERSION_2_3 or lower:
 *
 * The information returned by get_camera_info cannot change due to this status
 * change. By default, the framework will assume all devices are in this state.
 *
 * CAMERA_MODULE_API_VERSION_2_4:
 *
 * The information returned by get_camera_info will become valid after a device's
 * status changes to this. By default, the framework will assume all devices are in
 * this state.
 */
CAMERA_DEVICE_STATUS_PRESENT = 1,
/**
 * The camera device is connected, but it is undergoing an enumeration and
 * so opening the device will return -EBUSY.
 *
 * CAMERA_MODULE_API_VERSION_2_3 or lower:
 *
 * Calls to get_camera_info must still succeed, as if the camera was in the
 * PRESENT status.
 *
 * CAMERA_MODULE_API_VERSION_2_4:
 *
 * The camera device at this status must return -EINVAL for the get_camera_info
 * call, as the device is not ready.
 */
CAMERA_DEVICE_STATUS_ENUMERATING = 2,
} camera_device_status_t;
/**
 * torch_mode_status_t:
 *
 * The current status of the torch mode, as provided by the HAL through the
 * camera_module_callbacks.torch_mode_status_change() call.
 *
 * The torch mode status of a camera device is applicable only when the camera
 * device is present. The framework will not call set_torch_mode() to turn on
 * torch mode of a camera device if the camera device is not present. At module
 * load time, the framework will assume torch modes are in the
 * TORCH_MODE_STATUS_AVAILABLE_OFF state if the camera device is present and
 * android.flash.info.available is reported as true via get_camera_info() call.
 *
 * The behaviors of the camera HAL module that the framework expects in the
 * following situations when a camera device's status changes:
 * 1. A previously-disconnected camera device becomes connected.
 * After camera_module_callbacks::camera_device_status_change() is invoked
 * to inform the framework that the camera device is present, the framework
 * will assume the camera device's torch mode is in
 * TORCH_MODE_STATUS_AVAILABLE_OFF state. The camera HAL module does not need
 * to invoke camera_module_callbacks::torch_mode_status_change() unless the
 * flash unit is unavailable to use by set_torch_mode().
 *
 * 2. A previously-connected camera becomes disconnected.
 * After camera_module_callbacks::camera_device_status_change() is invoked
 * to inform the framework that the camera device is not present, the
 * framework will not call set_torch_mode() for the disconnected camera
 * device until its flash unit becomes available again. The camera HAL
 * module does not need to invoke
 * camera_module_callbacks::torch_mode_status_change() separately to inform
 * that the flash unit has become unavailable.
 *
 * 3. open() is called to open a camera device.
 * The camera HAL module must invoke
 * camera_module_callbacks::torch_mode_status_change() for all flash units
 * that have entered TORCH_MODE_STATUS_NOT_AVAILABLE state and cannot be
 * turned on by calling set_torch_mode() anymore due to this open() call.
 * open() must not trigger TORCH_MODE_STATUS_AVAILABLE_OFF before
 * TORCH_MODE_STATUS_NOT_AVAILABLE for all flash units that have become
 * unavailable.
 *
 * 4. close() is called to close a camera device.
 * The camera HAL module must invoke
 * camera_module_callbacks::torch_mode_status_change() for all flash units
 * that have entered TORCH_MODE_STATUS_AVAILABLE_OFF state and can be turned
 * on by calling set_torch_mode() again because of enough resources freed
 * up by this close() call.
 *
 * Note that the framework calling set_torch_mode() successfully must trigger
 * TORCH_MODE_STATUS_AVAILABLE_OFF or TORCH_MODE_STATUS_AVAILABLE_ON callback
 * for the given camera device. Additionally it must trigger
 * TORCH_MODE_STATUS_AVAILABLE_OFF callbacks for other previously-on torch
 * modes if HAL cannot keep multiple torch modes on simultaneously.
 */
typedef enum torch_mode_status {
/**
 * The flash unit is no longer available and the torch mode cannot be
 * turned on by calling set_torch_mode(). If the torch mode is on, it
 * will be turned off by HAL before HAL calls torch_mode_status_change().
 */
TORCH_MODE_STATUS_NOT_AVAILABLE = 0,
/**
 * A torch mode has become off and available to be turned on via
 * set_torch_mode(). This may happen in the following
 * cases:
 * 1. After the resources to turn on the torch mode have become available.
 * 2. After set_torch_mode() is called to turn off the torch mode.
 * 3. After the framework turned on the torch mode of some other camera
 * device and HAL had to turn off the torch modes of any camera devices
 * that were previously on.
 */
TORCH_MODE_STATUS_AVAILABLE_OFF = 1,
/**
 * A torch mode has become on and available to be turned off via
 * set_torch_mode(). This can happen only after set_torch_mode() is called
 * to turn on the torch mode.
 */
TORCH_MODE_STATUS_AVAILABLE_ON = 2,
} torch_mode_status_t;
/**
 * Callback functions for the camera HAL module to use to inform the framework
 * of changes to the camera subsystem.
 *
 * Version information (based on camera_module_t.common.module_api_version):
 *
 * Each callback is called only by HAL modules implementing the indicated
 * version or higher of the HAL module API interface.
 *
 * CAMERA_MODULE_API_VERSION_2_1:
 * camera_device_status_change()
 *
 * CAMERA_MODULE_API_VERSION_2_4:
 * torch_mode_status_change()
 */
typedef struct camera_module_callbacks {
/**
 * camera_device_status_change:
 *
 * Callback to the framework to indicate that the state of a specific camera
 * device has changed. At module load time, the framework will assume all
 * camera devices are in the CAMERA_DEVICE_STATUS_PRESENT state. The HAL
 * must call this method to inform the framework of any initially
 * NOT_PRESENT devices.
 *
 * This callback is added for CAMERA_MODULE_API_VERSION_2_1.
 *
 * camera_module_callbacks: The instance of camera_module_callbacks_t passed
 * to the module with set_callbacks.
 *
 * camera_id: The ID of the camera device that has a new status.
 * NOTE: the id is an integer here, unlike torch_mode_status_change()
 * below, which identifies the camera by a string id.
 *
 * new_status: The new status code, one of the camera_device_status_t enums,
 * or a platform-specific status.
 *
 */
void (*camera_device_status_change)(const struct camera_module_callbacks*,
int camera_id,
int new_status);
/**
 * torch_mode_status_change:
 *
 * Callback to the framework to indicate that the state of the torch mode
 * of the flash unit associated with a specific camera device has changed.
 * At module load time, the framework will assume the torch modes are in
 * the TORCH_MODE_STATUS_AVAILABLE_OFF state if android.flash.info.available
 * is reported as true via get_camera_info() call.
 *
 * This callback is added for CAMERA_MODULE_API_VERSION_2_4.
 *
 * camera_module_callbacks: The instance of camera_module_callbacks_t
 * passed to the module with set_callbacks.
 *
 * camera_id: The ID of camera device whose flash unit has a new torch mode
 * status.
 *
 * new_status: The new status code, one of the torch_mode_status_t enums.
 */
void (*torch_mode_status_change)(const struct camera_module_callbacks*,
const char* camera_id,
int new_status);
} camera_module_callbacks_t;
/**
 * Top-level interface of a camera HAL module, as loaded by the camera
 * service.
 */
typedef struct camera_module {
/**
 * Common methods of the camera module. This *must* be the first member of
 * camera_module as users of this structure will cast a hw_module_t to
 * camera_module pointer in contexts where it's known the hw_module_t
 * references a camera_module.
 *
 * The return values for common.methods->open for camera_module are:
 *
 * 0: On a successful open of the camera device.
 *
 * -ENODEV: The camera device cannot be opened due to an internal
 * error.
 *
 * -EINVAL: The input arguments are invalid, i.e. the id is invalid,
 * and/or the module is invalid.
 *
 * -EBUSY: The camera device was already opened for this camera id
 * (by using this method or open_legacy),
 * regardless of the device HAL version it was opened as.
 *
 * -EUSERS: The maximal number of camera devices that can be
 * opened concurrently were opened already, either by
 * this method or the open_legacy method.
 *
 * All other return values from common.methods->open will be treated as
 * -ENODEV.
 */
hw_module_t common;
/**
 * get_number_of_cameras:
 *
 * Returns the number of camera devices accessible through the camera
 * module. The camera devices are numbered 0 through N-1, where N is the
 * value returned by this call. The name of the camera device for open() is
 * simply the number converted to a string. That is, "0" for camera ID 0,
 * "1" for camera ID 1.
 *
 * Version information (based on camera_module_t.common.module_api_version):
 *
 * CAMERA_MODULE_API_VERSION_2_3 or lower:
 *
 * The value here must be static, and cannot change after the first call
 * to this method.
 *
 * CAMERA_MODULE_API_VERSION_2_4 or higher:
 *
 * The value here must be static, and must count only built-in cameras,
 * which have CAMERA_FACING_BACK or CAMERA_FACING_FRONT camera facing values
 * (camera_info.facing). The HAL must not include the external cameras
 * (camera_info.facing == CAMERA_FACING_EXTERNAL) into the return value
 * of this call. Frameworks will use camera_device_status_change callback
 * to manage number of external cameras.
 */
int (*get_number_of_cameras)(void);
/**
 * get_camera_info:
 *
 * Return the static camera information for a given camera device. This
 * information may not change for a camera device.
 *
 * Return values:
 *
 * 0: On a successful operation
 *
 * -ENODEV: The information cannot be provided due to an internal
 * error.
 *
 * -EINVAL: The input arguments are invalid, i.e. the id is invalid,
 * and/or the module is invalid.
 *
 * Version information (based on camera_module_t.common.module_api_version):
 *
 * CAMERA_MODULE_API_VERSION_2_4 or higher:
 *
 * When a camera is disconnected, its camera id becomes invalid. Calling
 * this method with this invalid camera id will get -EINVAL and NULL camera
 * static metadata (camera_info.static_camera_characteristics).
 */
int (*get_camera_info)(int camera_id, struct camera_info *info);
/**
 * set_callbacks:
 *
 * Provide callback function pointers to the HAL module to inform framework
 * of asynchronous camera module events. The framework will call this
 * function once after initial camera HAL module load, after the
 * get_number_of_cameras() method is called for the first time, and before
 * any other calls to the module.
 *
 * Version information (based on camera_module_t.common.module_api_version):
 *
 * CAMERA_MODULE_API_VERSION_1_0, CAMERA_MODULE_API_VERSION_2_0:
 *
 * Not provided by HAL module. Framework may not call this function.
 *
 * CAMERA_MODULE_API_VERSION_2_1:
 *
 * Valid to be called by the framework.
 *
 * Return values:
 *
 * 0: On a successful operation
 *
 * -ENODEV: The operation cannot be completed due to an internal
 * error.
 *
 * -EINVAL: The input arguments are invalid, i.e. the callbacks are
 * null
 */
int (*set_callbacks)(const camera_module_callbacks_t *callbacks);
/**
 * get_vendor_tag_ops:
 *
 * Get methods to query for vendor extension metadata tag information. The
 * HAL should fill in all the vendor tag operation methods, or leave ops
 * unchanged if no vendor tags are defined.
 *
 * The vendor_tag_ops structure used here is defined in:
 * system/media/camera/include/system/vendor_tags.h
 *
 * Version information (based on camera_module_t.common.module_api_version):
 *
 * CAMERA_MODULE_API_VERSION_1_x/2_0/2_1:
 * Not provided by HAL module. Framework may not call this function.
 *
 * CAMERA_MODULE_API_VERSION_2_2:
 * Valid to be called by the framework.
 */
void (*get_vendor_tag_ops)(vendor_tag_ops_t* ops);
/**
 * open_legacy:
 *
 * Open a specific legacy camera HAL device if multiple device HAL API
 * versions are supported by this camera HAL module. For example, if the
 * camera module supports both CAMERA_DEVICE_API_VERSION_1_0 and
 * CAMERA_DEVICE_API_VERSION_3_2 device API for the same camera id,
 * framework can call this function to open the camera device as
 * CAMERA_DEVICE_API_VERSION_1_0 device.
 *
 * This is an optional method. A Camera HAL module does not need to support
 * more than one device HAL version per device, and such modules may return
 * -ENOSYS for all calls to this method. For all older HAL device API
 * versions that are not supported, it may return -EOPNOTSUPP. When above
 * cases occur, the normal open() method (common.methods->open) will be
 * used by the framework instead.
 *
 * Version information (based on camera_module_t.common.module_api_version):
 *
 * CAMERA_MODULE_API_VERSION_1_x/2_0/2_1/2_2:
 * Not provided by HAL module. Framework will not call this function.
 *
 * CAMERA_MODULE_API_VERSION_2_3:
 * Valid to be called by the framework.
 *
 * Return values:
 *
 * 0: On a successful open of the camera device.
 *
 * -ENOSYS This method is not supported.
 *
 * -EOPNOTSUPP: The requested HAL version is not supported by this method.
 *
 * -EINVAL: The input arguments are invalid, i.e. the id is invalid,
 * and/or the module is invalid.
 *
 * -EBUSY: The camera device was already opened for this camera id
 * (by using this method or common.methods->open method),
 * regardless of the device HAL version it was opened as.
 *
 * -EUSERS: The maximal number of camera devices that can be
 * opened concurrently were opened already, either by
 * this method or common.methods->open method.
 */
int (*open_legacy)(const struct hw_module_t* module, const char* id,
uint32_t halVersion, struct hw_device_t** device);
/**
 * set_torch_mode:
 *
 * Turn on or off the torch mode of the flash unit associated with a given
 * camera ID. If the operation is successful, HAL must notify the framework
 * torch state by invoking
 * camera_module_callbacks.torch_mode_status_change() with the new state.
 *
 * The camera device has a higher priority accessing the flash unit. When
 * there are any resource conflicts, such as open() is called to open a
 * camera device, HAL module must notify the framework through
 * camera_module_callbacks.torch_mode_status_change() that the
 * torch mode has been turned off and the torch mode state has become
 * TORCH_MODE_STATUS_NOT_AVAILABLE. When resources to turn on torch mode
 * become available again, HAL module must notify the framework through
 * camera_module_callbacks.torch_mode_status_change() that the torch mode
 * state has become TORCH_MODE_STATUS_AVAILABLE_OFF for set_torch_mode() to
 * be called.
 *
 * When the framework calls set_torch_mode() to turn on the torch mode of a
 * flash unit, if HAL cannot keep multiple torch modes on simultaneously,
 * HAL should turn off the torch mode that was turned on by
 * a previous set_torch_mode() call and notify the framework that the torch
 * mode state of that flash unit has become TORCH_MODE_STATUS_AVAILABLE_OFF.
 *
 * Version information (based on camera_module_t.common.module_api_version):
 *
 * CAMERA_MODULE_API_VERSION_1_x/2_0/2_1/2_2/2_3:
 * Not provided by HAL module. Framework will not call this function.
 *
 * CAMERA_MODULE_API_VERSION_2_4:
 * Valid to be called by the framework.
 *
 * Return values:
 *
 * 0: On a successful operation.
 *
 * -ENOSYS: The camera device does not support this operation. It is
 * returned if and only if android.flash.info.available is
 * false.
 *
 * -EBUSY: The camera device is already in use.
 *
 * -EUSERS: The resources needed to turn on the torch mode are not
 * available, typically because other camera devices are
 * holding the resources to make using the flash unit not
 * possible.
 *
 * -EINVAL: camera_id is invalid.
 *
 */
int (*set_torch_mode)(const char* camera_id, bool enabled);
/**
 * init:
 *
 * This method is called by the camera service before any other methods
 * are invoked, right after the camera HAL library has been successfully
 * loaded. It may be left as NULL by the HAL module, if no initialization
 * is needed.
 *
 * It can be used by HAL implementations to perform initialization and
 * other one-time operations.
 *
 * Version information (based on camera_module_t.common.module_api_version):
 *
 * CAMERA_MODULE_API_VERSION_1_x/2_0/2_1/2_2/2_3:
 * Not provided by HAL module. Framework will not call this function.
 *
 * CAMERA_MODULE_API_VERSION_2_4:
 * If not NULL, will always be called by the framework once after the HAL
 * module is loaded, before any other HAL module method is called.
 *
 * Return values:
 *
 * 0: On a successful operation.
 *
 * -ENODEV: Initialization cannot be completed due to an internal
 * error. The HAL must be assumed to be in a nonfunctional
 * state.
 *
 */
int (*init)();
/* reserved for future use */
void* reserved[5];
} camera_module_t;
__END_DECLS
#endif /* ANDROID_INCLUDE_CAMERA_COMMON_H */

View File

@ -0,0 +1,174 @@
/* SPDX-License-Identifier: Apache-2.0 */
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANDROID_FB_INTERFACE_H
#define ANDROID_FB_INTERFACE_H
#include <stdint.h>
#include <sys/cdefs.h>
#include <sys/types.h>
#include <cutils/native_handle.h>
#include <hardware/hardware.h>
__BEGIN_DECLS
#define GRALLOC_HARDWARE_FB0 "fb0"
/*****************************************************************************/
/*****************************************************************************/
typedef struct framebuffer_device_t {
/**
 * Common methods of the framebuffer device. This *must* be the first member of
 * framebuffer_device_t as users of this structure will cast a hw_device_t to
 * framebuffer_device_t pointer in contexts where it's known the hw_device_t references a
 * framebuffer_device_t.
 */
struct hw_device_t common;
/* flags describing some attributes of the framebuffer */
const uint32_t flags;
/* dimensions of the framebuffer in pixels */
const uint32_t width;
const uint32_t height;
/* framebuffer stride in pixels */
const int stride;
/* framebuffer pixel format */
const int format;
/* resolution of the framebuffer's display panel in pixel per inch*/
const float xdpi;
const float ydpi;
/* framebuffer's display panel refresh rate in frames per second */
const float fps;
/* min swap interval supported by this framebuffer */
const int minSwapInterval;
/* max swap interval supported by this framebuffer */
const int maxSwapInterval;
/* Number of framebuffers supported*/
const int numFramebuffers;
int reserved[7];
/*
 * requests a specific swap-interval (same definition than EGL)
 *
 * Returns 0 on success or -errno on error.
 */
int (*setSwapInterval)(struct framebuffer_device_t* window,
int interval);
/*
 * This hook is OPTIONAL.
 *
 * It is non-NULL if the framebuffer driver supports "update-on-demand"
 * and the given rectangle is the area of the screen that gets
 * updated during (*post)().
 *
 * This is useful on devices that are able to DMA only a portion of
 * the screen to the display panel, upon demand -- as opposed to
 * constantly refreshing the panel 60 times per second, for instance.
 *
 * Only the area defined by this rectangle is guaranteed to be valid, that
 * is, the driver is not allowed to post anything outside of this
 * rectangle.
 *
 * The rectangle is evaluated during (*post)() and specifies which area
 * of the buffer passed in (*post)() shall be posted.
 *
 * return -EINVAL if width or height <=0, or if left or top < 0
 */
int (*setUpdateRect)(struct framebuffer_device_t* window,
int left, int top, int width, int height);
/*
 * Post <buffer> to the display (display it on the screen)
 * The buffer must have been allocated with the
 * GRALLOC_USAGE_HW_FB usage flag.
 * buffer must be the same width and height as the display and must NOT
 * be locked.
 *
 * The buffer is shown during the next VSYNC.
 *
 * If the same buffer is posted again (possibly after some other buffer),
 * post() will block until the first post is completed.
 *
 * Internally, post() is expected to lock the buffer so that a
 * subsequent call to gralloc_module_t::(*lock)() with USAGE_RENDER or
 * USAGE_*_WRITE will block until it is safe; that is typically once this
 * buffer is shown and another buffer has been posted.
 *
 * Returns 0 on success or -errno on error.
 */
int (*post)(struct framebuffer_device_t* dev, buffer_handle_t buffer);
/*
 * The (*compositionComplete)() method must be called after the
 * compositor has finished issuing GL commands for client buffers.
 */
int (*compositionComplete)(struct framebuffer_device_t* dev);
/*
 * This hook is OPTIONAL.
 *
 * If non-NULL it will be called by SurfaceFlinger on dumpsys
 */
void (*dump)(struct framebuffer_device_t* dev, char *buff, int buff_len);
/*
 * (*enableScreen)() is used to either blank (enable=0) or
 * unblank (enable=1) the screen this framebuffer is attached to.
 *
 * Returns 0 on success or -errno on error.
 */
int (*enableScreen)(struct framebuffer_device_t* dev, int enable);
void* reserved_proc[6];
} framebuffer_device_t;
/** convenience API for opening and closing a supported device */
static inline int framebuffer_open(const struct hw_module_t* module,
struct framebuffer_device_t** device) {
return module->methods->open(module,
GRALLOC_HARDWARE_FB0, TO_HW_DEVICE_T_OPEN(device));
}
static inline int framebuffer_close(struct framebuffer_device_t* device) {
return device->common.close(&device->common);
}
__END_DECLS
#endif // ANDROID_FB_INTERFACE_H

View File

@ -0,0 +1,416 @@
/* SPDX-License-Identifier: Apache-2.0 */
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANDROID_GRALLOC_INTERFACE_H
#define ANDROID_GRALLOC_INTERFACE_H
#include <hardware/hardware.h>
#include <system/graphics.h>
#include <stdint.h>
#include <sys/cdefs.h>
#include <sys/types.h>
#include <cutils/native_handle.h>
#include <hardware/fb.h>
#include <hardware/hardware.h>
__BEGIN_DECLS
/**
* Module versioning information for the Gralloc hardware module, based on
* gralloc_module_t.common.module_api_version.
*
* Version History:
*
* GRALLOC_MODULE_API_VERSION_0_1:
* Initial Gralloc hardware module API.
*
* GRALLOC_MODULE_API_VERSION_0_2:
* Add support for flexible YCbCr format with (*lock_ycbcr)() method.
*
* GRALLOC_MODULE_API_VERSION_0_3:
* Add support for fence passing to/from lock/unlock.
*/
#define GRALLOC_MODULE_API_VERSION_0_1 HARDWARE_MODULE_API_VERSION(0, 1)
#define GRALLOC_MODULE_API_VERSION_0_2 HARDWARE_MODULE_API_VERSION(0, 2)
#define GRALLOC_MODULE_API_VERSION_0_3 HARDWARE_MODULE_API_VERSION(0, 3)
#define GRALLOC_DEVICE_API_VERSION_0_1 HARDWARE_DEVICE_API_VERSION(0, 1)
/**
* The id of this module
*/
#define GRALLOC_HARDWARE_MODULE_ID "gralloc"
/**
* Name of the graphics device to open
*/
#define GRALLOC_HARDWARE_GPU0 "gpu0"
enum {
    /* buffer is never read in software */
    GRALLOC_USAGE_SW_READ_NEVER         = 0x00000000U,
    /* buffer is rarely read in software */
    GRALLOC_USAGE_SW_READ_RARELY        = 0x00000002U,
    /* buffer is often read in software */
    GRALLOC_USAGE_SW_READ_OFTEN         = 0x00000003U,
    /* mask for the software read values */
    GRALLOC_USAGE_SW_READ_MASK          = 0x0000000FU,
    /* buffer is never written in software */
    GRALLOC_USAGE_SW_WRITE_NEVER        = 0x00000000U,
    /* buffer is rarely written in software */
    GRALLOC_USAGE_SW_WRITE_RARELY       = 0x00000020U,
    /* buffer is often written in software */
    GRALLOC_USAGE_SW_WRITE_OFTEN        = 0x00000030U,
    /* mask for the software write values */
    GRALLOC_USAGE_SW_WRITE_MASK         = 0x000000F0U,
    /* buffer will be used as an OpenGL ES texture */
    GRALLOC_USAGE_HW_TEXTURE            = 0x00000100U,
    /* buffer will be used as an OpenGL ES render target */
    GRALLOC_USAGE_HW_RENDER             = 0x00000200U,
    /* buffer will be used by the 2D hardware blitter */
    GRALLOC_USAGE_HW_2D                 = 0x00000400U,
    /* buffer will be used by the HWComposer HAL module */
    GRALLOC_USAGE_HW_COMPOSER           = 0x00000800U,
    /* buffer will be used with the framebuffer device */
    GRALLOC_USAGE_HW_FB                 = 0x00001000U,
    /* buffer should be displayed full-screen on an external display when
     * possible */
    GRALLOC_USAGE_EXTERNAL_DISP         = 0x00002000U,
    /* Must have a hardware-protected path to external display sink for
     * this buffer.  If a hardware-protected path is not available, then
     * either don't composite only this buffer (preferred) to the
     * external sink, or (less desirable) do not route the entire
     * composition to the external sink.  */
    GRALLOC_USAGE_PROTECTED             = 0x00004000U,
    /* buffer may be used as a cursor */
    GRALLOC_USAGE_CURSOR                = 0x00008000U,
    /* buffer will be used with the HW video encoder */
    GRALLOC_USAGE_HW_VIDEO_ENCODER      = 0x00010000U,
    /* buffer will be written by the HW camera pipeline */
    GRALLOC_USAGE_HW_CAMERA_WRITE       = 0x00020000U,
    /* buffer will be read by the HW camera pipeline */
    GRALLOC_USAGE_HW_CAMERA_READ        = 0x00040000U,
    /* buffer will be used as part of zero-shutter-lag queue
     * (combines CAMERA_WRITE | CAMERA_READ) */
    GRALLOC_USAGE_HW_CAMERA_ZSL         = 0x00060000U,
    /* mask for the camera access values */
    GRALLOC_USAGE_HW_CAMERA_MASK        = 0x00060000U,
    /* mask for the hardware usage bits (HW_TEXTURE..HW_VIDEO_ENCODER
     * plus the camera bits) */
    GRALLOC_USAGE_HW_MASK               = 0x00071F00U,
    /* buffer will be used as a RenderScript Allocation */
    GRALLOC_USAGE_RENDERSCRIPT          = 0x00100000U,
    /* Set by the consumer to indicate to the producer that they may attach a
     * buffer that they did not detach from the BufferQueue. Will be filtered
     * out by GRALLOC_USAGE_ALLOC_MASK, so gralloc modules will not need to
     * handle this flag. */
    GRALLOC_USAGE_FOREIGN_BUFFERS       = 0x00200000U,
    /* Mask of all flags which could be passed to a gralloc module for buffer
     * allocation. Any flags not in this mask do not need to be handled by
     * gralloc modules. */
    GRALLOC_USAGE_ALLOC_MASK            = ~(GRALLOC_USAGE_FOREIGN_BUFFERS),
    /* implementation-specific private usage flags */
    GRALLOC_USAGE_PRIVATE_0             = 0x10000000U,
    GRALLOC_USAGE_PRIVATE_1             = 0x20000000U,
    GRALLOC_USAGE_PRIVATE_2             = 0x40000000U,
    GRALLOC_USAGE_PRIVATE_3             = 0x80000000U,
    GRALLOC_USAGE_PRIVATE_MASK          = 0xF0000000U,
};
/*****************************************************************************/
/**
* Every hardware module must have a data structure named HAL_MODULE_INFO_SYM
* and the fields of this data structure must begin with hw_module_t
* followed by module specific information.
*/
typedef struct gralloc_module_t {
    /** Common module methods; must be the first member (cast compatibility). */
    struct hw_module_t common;
    /*
     * (*registerBuffer)() must be called before a buffer_handle_t that has not
     * been created with (*alloc_device_t::alloc)() can be used.
     *
     * This is intended to be used with buffer_handle_t's that have been
     * received in this process through IPC.
     *
     * This function checks that the handle is indeed a valid one and prepares
     * it for use with (*lock)() and (*unlock)().
     *
     * It is not necessary to call (*registerBuffer)() on a handle created
     * with (*alloc_device_t::alloc)().
     *
     * returns an error if this buffer_handle_t is not valid.
     */
    int (*registerBuffer)(struct gralloc_module_t const* module,
            buffer_handle_t handle);
    /*
     * (*unregisterBuffer)() is called once this handle is no longer needed in
     * this process. After this call, it is an error to call (*lock)(),
     * (*unlock)(), or (*registerBuffer)().
     *
     * This function doesn't close or free the handle itself; this is done
     * by other means, usually through libcutils's native_handle_close() and
     * native_handle_free().
     *
     * It is an error to call (*unregisterBuffer)() on a buffer that wasn't
     * explicitly registered first.
     */
    int (*unregisterBuffer)(struct gralloc_module_t const* module,
            buffer_handle_t handle);
    /*
     * The (*lock)() method is called before a buffer is accessed for the
     * specified usage. This call may block, for instance if the h/w needs
     * to finish rendering or if CPU caches need to be synchronized.
     *
     * The caller promises to modify only pixels in the area specified
     * by (l,t,w,h).
     *
     * The content of the buffer outside of the specified area is NOT modified
     * by this call.
     *
     * If usage specifies GRALLOC_USAGE_SW_*, vaddr is filled with the address
     * of the buffer in virtual memory.
     *
     * Note calling (*lock)() on HAL_PIXEL_FORMAT_YCbCr_*_888 buffers will fail
     * and return -EINVAL. These buffers must be locked with (*lock_ycbcr)()
     * instead.
     *
     * THREADING CONSIDERATIONS:
     *
     * It is legal for several different threads to lock a buffer for read
     * access; none of the threads are blocked.
     *
     * However, locking a buffer simultaneously for write or read/write is
     * undefined, but:
     * - shall not result in termination of the process
     * - shall not block the caller
     * It is acceptable to return an error or to leave the buffer's content
     * in an indeterminate state.
     *
     * If the buffer was created with a usage mask incompatible with the
     * requested usage flags here, -EINVAL is returned.
     *
     */
    int (*lock)(struct gralloc_module_t const* module,
            buffer_handle_t handle, int usage,
            int l, int t, int w, int h,
            void** vaddr);
    /*
     * The (*unlock)() method must be called after all changes to the buffer
     * are completed.
     */
    int (*unlock)(struct gralloc_module_t const* module,
            buffer_handle_t handle);
    /* reserved for future use */
    int (*perform)(struct gralloc_module_t const* module,
            int operation, ... );
    /*
     * The (*lock_ycbcr)() method is like the (*lock)() method, with the
     * difference that it fills a struct ycbcr with a description of the buffer
     * layout, and zeroes out the reserved fields.
     *
     * If the buffer format is not compatible with a flexible YUV format (e.g.
     * the buffer layout cannot be represented with the ycbcr struct), it
     * will return -EINVAL.
     *
     * This method must work on buffers with HAL_PIXEL_FORMAT_YCbCr_*_888
     * if supported by the device, as well as with any other format that is
     * requested by the multimedia codecs when they are configured with a
     * flexible-YUV-compatible color-format with android native buffers.
     *
     * Note that this method may also be called on buffers of other formats,
     * including non-YUV formats.
     *
     * Added in GRALLOC_MODULE_API_VERSION_0_2.
     */
    int (*lock_ycbcr)(struct gralloc_module_t const* module,
            buffer_handle_t handle, int usage,
            int l, int t, int w, int h,
            struct android_ycbcr *ycbcr);
    /*
     * The (*lockAsync)() method is like the (*lock)() method except
     * that the buffer's sync fence object is passed into the lock
     * call instead of requiring the caller to wait for completion.
     *
     * The gralloc implementation takes ownership of the fenceFd and
     * is responsible for closing it when no longer needed.
     *
     * Added in GRALLOC_MODULE_API_VERSION_0_3.
     */
    int (*lockAsync)(struct gralloc_module_t const* module,
            buffer_handle_t handle, int usage,
            int l, int t, int w, int h,
            void** vaddr, int fenceFd);
    /*
     * The (*unlockAsync)() method is like the (*unlock)() method
     * except that a buffer sync fence object is returned from the
     * lock call, representing the completion of any pending work
     * performed by the gralloc implementation.
     *
     * The caller takes ownership of the fenceFd and is responsible
     * for closing it when no longer needed.
     *
     * Added in GRALLOC_MODULE_API_VERSION_0_3.
     */
    int (*unlockAsync)(struct gralloc_module_t const* module,
            buffer_handle_t handle, int* fenceFd);
    /*
     * The (*lockAsync_ycbcr)() method is like the (*lock_ycbcr)()
     * method except that the buffer's sync fence object is passed
     * into the lock call instead of requiring the caller to wait for
     * completion.
     *
     * The gralloc implementation takes ownership of the fenceFd and
     * is responsible for closing it when no longer needed.
     *
     * Added in GRALLOC_MODULE_API_VERSION_0_3.
     */
    int (*lockAsync_ycbcr)(struct gralloc_module_t const* module,
            buffer_handle_t handle, int usage,
            int l, int t, int w, int h,
            struct android_ycbcr *ycbcr, int fenceFd);
    /* reserved for future use */
    void* reserved_proc[3];
} gralloc_module_t;
/*****************************************************************************/
/**
* Every device data structure must begin with hw_device_t
* followed by module specific public methods and attributes.
*/
typedef struct alloc_device_t {
    /** Common device methods; must be the first member (cast compatibility). */
    struct hw_device_t common;
    /*
     * (*alloc)() Allocates a buffer in graphic memory with the requested
     * parameters and returns a buffer_handle_t and the stride in pixels to
     * allow the implementation to satisfy hardware constraints on the width
     * of a pixmap (eg: it may have to be multiple of 8 pixels).
     * The CALLER TAKES OWNERSHIP of the buffer_handle_t.
     *
     * If format is HAL_PIXEL_FORMAT_YCbCr_420_888, the returned stride must be
     * 0, since the actual strides are available from the android_ycbcr
     * structure.
     *
     * Returns 0 on success or -errno on error.
     */
    int (*alloc)(struct alloc_device_t* dev,
            int w, int h, int format, int usage,
            buffer_handle_t* handle, int* stride);
    /*
     * (*free)() Frees a previously allocated buffer.
     * Behavior is undefined if the buffer is still mapped in any process,
     * but shall not result in termination of the program or security breaches
     * (allowing a process to get access to another process' buffers).
     * THIS FUNCTION TAKES OWNERSHIP of the buffer_handle_t which becomes
     * invalid after the call.
     *
     * Returns 0 on success or -errno on error.
     */
    int (*free)(struct alloc_device_t* dev,
            buffer_handle_t handle);
    /* This hook is OPTIONAL.
     *
     * If non NULL it will be called by SurfaceFlinger on dumpsys
     */
    void (*dump)(struct alloc_device_t *dev, char *buff, int buff_len);
    /* padding reserved for future use */
    void* reserved_proc[7];
} alloc_device_t;
/** convenience API for opening and closing a supported device */
static inline int gralloc_open(const struct hw_module_t* module,
struct alloc_device_t** device) {
return module->methods->open(module,
GRALLOC_HARDWARE_GPU0, TO_HW_DEVICE_T_OPEN(device));
}
static inline int gralloc_close(struct alloc_device_t* device) {
return device->common.close(&device->common);
}
/**
* map_usage_to_memtrack should be called after allocating a gralloc buffer.
*
* @param usage - it is the flag used when alloc function is called.
*
* This function maps the gralloc usage flags to appropriate memtrack bucket.
* GrallocHAL implementers and users should make an additional ION_IOCTL_TAG
* call using the memtrack tag returned by this function. This will help the
* in-kernel memtack to categorize the memory allocated by different processes
* according to their usage.
*
*/
/**
 * Map a gralloc usage bit-mask to the memtrack bucket name ("camera",
 * "video", "gl", "cpu", or "graphics") used for in-kernel memory accounting.
 *
 * Only the allocatable bits (GRALLOC_USAGE_ALLOC_MASK) are considered.
 * The guard order below matters: a buffer carrying several usage bits is
 * attributed to the first matching bucket, matching the original behavior
 * (camera-write wins over video, video over GL, GL over camera-read, and
 * camera-read over plain CPU access).
 */
static inline const char* map_usage_to_memtrack(uint32_t usage) {
    const uint32_t alloc_bits = usage & GRALLOC_USAGE_ALLOC_MASK;

    if (alloc_bits & GRALLOC_USAGE_HW_CAMERA_WRITE) {
        return "camera";
    }
    if (alloc_bits & (GRALLOC_USAGE_HW_VIDEO_ENCODER |
                      GRALLOC_USAGE_EXTERNAL_DISP)) {
        return "video";
    }
    if (alloc_bits & (GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE)) {
        return "gl";
    }
    if (alloc_bits & GRALLOC_USAGE_HW_CAMERA_READ) {
        return "camera";
    }
    if (alloc_bits & (GRALLOC_USAGE_SW_READ_MASK |
                      GRALLOC_USAGE_SW_WRITE_MASK)) {
        return "cpu";
    }
    return "graphics";
}
__END_DECLS
#endif // ANDROID_GRALLOC_INTERFACE_H

View File

@ -0,0 +1,245 @@
/* SPDX-License-Identifier: Apache-2.0 */
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANDROID_INCLUDE_HARDWARE_HARDWARE_H
#define ANDROID_INCLUDE_HARDWARE_HARDWARE_H
#include <stdint.h>
#include <sys/cdefs.h>
#include <cutils/native_handle.h>
#include <system/graphics.h>
__BEGIN_DECLS
/*
* Value for the hw_module_t.tag field
*/
#define MAKE_TAG_CONSTANT(A,B,C,D) (((A) << 24) | ((B) << 16) | ((C) << 8) | (D))
#define HARDWARE_MODULE_TAG MAKE_TAG_CONSTANT('H', 'W', 'M', 'T')
#define HARDWARE_DEVICE_TAG MAKE_TAG_CONSTANT('H', 'W', 'D', 'T')
#define HARDWARE_MAKE_API_VERSION(maj,min) \
((((maj) & 0xff) << 8) | ((min) & 0xff))
#define HARDWARE_MAKE_API_VERSION_2(maj,min,hdr) \
((((maj) & 0xff) << 24) | (((min) & 0xff) << 16) | ((hdr) & 0xffff))
#define HARDWARE_API_VERSION_2_MAJ_MIN_MASK 0xffff0000
#define HARDWARE_API_VERSION_2_HEADER_MASK 0x0000ffff
/*
* The current HAL API version.
*
* All module implementations must set the hw_module_t.hal_api_version field
* to this value when declaring the module with HAL_MODULE_INFO_SYM.
*
* Note that previous implementations have always set this field to 0.
* Therefore, libhardware HAL API will always consider versions 0.0 and 1.0
* to be 100% binary compatible.
*
*/
#define HARDWARE_HAL_API_VERSION HARDWARE_MAKE_API_VERSION(1, 0)
/*
* Helper macros for module implementors.
*
* The derived modules should provide convenience macros for supported
* versions so that implementations can explicitly specify module/device
* versions at definition time.
*
* Use this macro to set the hw_module_t.module_api_version field.
*/
#define HARDWARE_MODULE_API_VERSION(maj,min) HARDWARE_MAKE_API_VERSION(maj,min)
#define HARDWARE_MODULE_API_VERSION_2(maj,min,hdr) HARDWARE_MAKE_API_VERSION_2(maj,min,hdr)
/*
* Use this macro to set the hw_device_t.version field
*/
#define HARDWARE_DEVICE_API_VERSION(maj,min) HARDWARE_MAKE_API_VERSION(maj,min)
#define HARDWARE_DEVICE_API_VERSION_2(maj,min,hdr) HARDWARE_MAKE_API_VERSION_2(maj,min,hdr)
struct hw_module_t;
struct hw_module_methods_t;
struct hw_device_t;
/**
* Every hardware module must have a data structure named HAL_MODULE_INFO_SYM
* and the fields of this data structure must begin with hw_module_t
* followed by module specific information.
*/
typedef struct hw_module_t {
    /** tag must be initialized to HARDWARE_MODULE_TAG */
    uint32_t tag;
    /**
     * The API version of the implemented module. The module owner is
     * responsible for updating the version when a module interface has
     * changed.
     *
     * The derived modules such as gralloc and audio own and manage this field.
     * The module user must interpret the version field to decide whether or
     * not to inter-operate with the supplied module implementation.
     * For example, SurfaceFlinger is responsible for making sure that
     * it knows how to manage different versions of the gralloc-module API,
     * and AudioFlinger must know how to do the same for audio-module API.
     *
     * The module API version should include a major and a minor component.
     * For example, version 1.0 could be represented as 0x0100. This format
     * implies that versions 0x0100-0x01ff are all API-compatible.
     *
     * In the future, libhardware will expose a hw_get_module_version()
     * (or equivalent) function that will take minimum/maximum supported
     * versions as arguments and would be able to reject modules with
     * versions outside of the supplied range.
     */
    uint16_t module_api_version;
#define version_major module_api_version
    /**
     * version_major/version_minor defines are supplied here for temporary
     * source code compatibility. They will be removed in the next version.
     * ALL clients must convert to the new version format.
     */
    /**
     * The API version of the HAL module interface. This is meant to
     * version the hw_module_t, hw_module_methods_t, and hw_device_t
     * structures and definitions.
     *
     * The HAL interface owns this field. Module users/implementations
     * must NOT rely on this value for version information.
     *
     * Presently, 0 is the only valid value.
     */
    uint16_t hal_api_version;
#define version_minor hal_api_version
    /** Identifier of module */
    const char *id;
    /** Name of this module */
    const char *name;
    /** Author/owner/implementor of the module */
    const char *author;
    /** Modules methods */
    struct hw_module_methods_t* methods;
    /** module's dso */
    void* dso;
    /** padding to 128 bytes, reserved for future use */
#ifdef __LP64__
    uint64_t reserved[32-7];
#else
    /** padding to 128 bytes, reserved for future use */
    uint32_t reserved[32-7];
#endif
} hw_module_t;
/** Table of entry points every hardware module must provide. */
typedef struct hw_module_methods_t {
    /** Open a specific device by id; fills *device on success. */
    int (*open)(const struct hw_module_t* module, const char* id,
            struct hw_device_t** device);
} hw_module_methods_t;
/**
* Every device data structure must begin with hw_device_t
* followed by module specific public methods and attributes.
*/
typedef struct hw_device_t {
    /** tag must be initialized to HARDWARE_DEVICE_TAG */
    uint32_t tag;
    /**
     * Version of the module-specific device API. This value is used by
     * the derived-module user to manage different device implementations.
     *
     * The module user is responsible for checking the module_api_version
     * and device version fields to ensure that the user is capable of
     * communicating with the specific module implementation.
     *
     * One module can support multiple devices with different versions. This
     * can be useful when a device interface changes in an incompatible way
     * but it is still necessary to support older implementations at the same
     * time. One such example is the Camera 2.0 API.
     *
     * This field is interpreted by the module user and is ignored by the
     * HAL interface itself.
     */
    uint32_t version;
    /** reference to the module this device belongs to */
    struct hw_module_t* module;
    /** padding reserved for future use */
#ifdef __LP64__
    uint64_t reserved[12];
#else
    uint32_t reserved[12];
#endif
    /** Close this device */
    int (*close)(struct hw_device_t* device);
} hw_device_t;
#ifdef __cplusplus
#define TO_HW_DEVICE_T_OPEN(x) reinterpret_cast<struct hw_device_t**>(x)
#else
#define TO_HW_DEVICE_T_OPEN(x) (struct hw_device_t**)(x)
#endif
/**
* Name of the hal_module_info
*/
#define HAL_MODULE_INFO_SYM HMI
/**
* Name of the hal_module_info as a string
*/
#define HAL_MODULE_INFO_SYM_AS_STR "HMI"
/**
* Get the module info associated with a module by id.
*
* @return: 0 == success, <0 == error and *module == NULL
*/
int hw_get_module(const char *id, const struct hw_module_t **module);
/**
* Get the module info associated with a module instance by class 'class_id'
* and instance 'inst'.
*
* Some modules types necessitate multiple instances. For example audio supports
* multiple concurrent interfaces and thus 'audio' is the module class
* and 'primary' or 'a2dp' are module interfaces. This implies that the files
* providing these modules would be named audio.primary.<variant>.so and
* audio.a2dp.<variant>.so
*
* @return: 0 == success, <0 == error and *module == NULL
*/
int hw_get_module_by_class(const char *class_id, const char *inst,
const struct hw_module_t **module);
__END_DECLS
#endif /* ANDROID_INCLUDE_HARDWARE_HARDWARE_H */

View File

@ -0,0 +1,7 @@
# SPDX-License-Identifier: CC0-1.0
# Include roots for the vendored Android HAL headers (libhardware, camera
# metadata, system/core) so consumers can build against them.
android_includes = ([
    include_directories('hardware/libhardware/include/'),
    include_directories('metadata/'),
    include_directories('system/core/include'),
])

View File

@ -0,0 +1,101 @@
/* SPDX-License-Identifier: Apache-2.0 */
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef SYSTEM_MEDIA_PRIVATE_INCLUDE_CAMERA_METADATA_HIDDEN_H
#define SYSTEM_MEDIA_PRIVATE_INCLUDE_CAMERA_METADATA_HIDDEN_H
#include <system/camera_vendor_tags.h>
/**
* Error codes returned by vendor tags ops operations. These are intended
* to be used by all framework code that uses the return values from the
* vendor operations object.
*/
#define VENDOR_SECTION_NAME_ERR NULL
#define VENDOR_TAG_NAME_ERR NULL
#define VENDOR_TAG_COUNT_ERR (-1)
#define VENDOR_TAG_TYPE_ERR (-1)
#ifdef __cplusplus
extern "C" {
#endif
/** **These are private functions for use only by the camera framework.** **/
/**
* Set the global vendor tag operations object used to define vendor tag
* structure when parsing camera metadata with functions defined in
* system/media/camera/include/camera_metadata.h.
*/
ANDROID_API
int set_camera_metadata_vendor_ops(const vendor_tag_ops_t *query_ops);
/**
* Set the global vendor tag cache operations object used to define vendor tag
* structure when parsing camera metadata with functions defined in
* system/media/camera/include/camera_metadata.h.
*/
ANDROID_API
int set_camera_metadata_vendor_cache_ops(
const struct vendor_tag_cache_ops *query_cache_ops);
/**
* Set the vendor id for a particular metadata buffer.
*/
ANDROID_API
void set_camera_metadata_vendor_id(camera_metadata_t *meta,
metadata_vendor_id_t id);
/**
* Retrieve the vendor id for a particular metadata buffer.
*/
ANDROID_API
metadata_vendor_id_t get_camera_metadata_vendor_id(
const camera_metadata_t *meta);
/**
* Retrieve the type of a tag. Returns -1 if no such tag is defined.
*/
ANDROID_API
int get_local_camera_metadata_tag_type_vendor_id(uint32_t tag,
metadata_vendor_id_t id);
/**
* Retrieve the name of a tag. Returns NULL if no such tag is defined.
*/
ANDROID_API
const char *get_local_camera_metadata_tag_name_vendor_id(uint32_t tag,
metadata_vendor_id_t id);
/**
 * Retrieve the name of a tag section. Returns NULL if no such tag is defined.
 */
ANDROID_API
const char *get_local_camera_metadata_section_name_vendor_id(uint32_t tag,
        metadata_vendor_id_t id);
#ifdef __cplusplus
} /* extern "C" */
#endif
#endif /* SYSTEM_MEDIA_PRIVATE_INCLUDE_CAMERA_METADATA_HIDDEN_H */

View File

@ -0,0 +1,581 @@
/* SPDX-License-Identifier: Apache-2.0 */
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef SYSTEM_MEDIA_INCLUDE_ANDROID_CAMERA_METADATA_H
#define SYSTEM_MEDIA_INCLUDE_ANDROID_CAMERA_METADATA_H
#include <string.h>
#include <stdint.h>
#include <cutils/compiler.h>
#ifdef __cplusplus
extern "C" {
#endif
/**
* Tag hierarchy and enum definitions for camera_metadata_entry
* =============================================================================
*/
/**
* Main enum definitions are in a separate file to make it easy to
* maintain
*/
#include "camera_metadata_tags.h"
/**
* Enum range for each top-level category
*/
ANDROID_API
extern unsigned int camera_metadata_section_bounds[ANDROID_SECTION_COUNT][2];
ANDROID_API
extern const char *camera_metadata_section_names[ANDROID_SECTION_COUNT];
/**
* Type definitions for camera_metadata_entry
* =============================================================================
*/
/** Wire types an entry's data may have; indexes camera_metadata_type_size. */
enum {
    // Unsigned 8-bit integer (uint8_t)
    TYPE_BYTE = 0,
    // Signed 32-bit integer (int32_t)
    TYPE_INT32 = 1,
    // 32-bit float (float)
    TYPE_FLOAT = 2,
    // Signed 64-bit integer (int64_t)
    TYPE_INT64 = 3,
    // 64-bit float (double)
    TYPE_DOUBLE = 4,
    // A 64-bit fraction (camera_metadata_rational_t)
    TYPE_RATIONAL = 5,
    // Number of type fields
    NUM_TYPES
};
/** A signed rational value, numerator / denominator. */
typedef struct camera_metadata_rational {
    int32_t numerator;
    int32_t denominator;
} camera_metadata_rational_t;
/**
* A reference to a metadata entry in a buffer.
*
* The data union pointers point to the real data in the buffer, and can be
* modified in-place if the count does not need to change. The count is the
* number of entries in data of the entry's type, not a count of bytes.
*/
typedef struct camera_metadata_entry {
    size_t index;    // position of this entry within its metadata buffer
    uint32_t tag;    // tag identifying the entry's meaning
    uint8_t type;    // one of the TYPE_* values above
    size_t count;    // number of elements of 'type' in data, not bytes
    // Pointers into the real data in the buffer; values may be modified
    // in-place if the count does not need to change.
    union {
        uint8_t *u8;
        int32_t *i32;
        float *f;
        int64_t *i64;
        double *d;
        camera_metadata_rational_t *r;
    } data;
} camera_metadata_entry_t;
/**
* A read-only reference to a metadata entry in a buffer. Identical to
* camera_metadata_entry in layout
*/
typedef struct camera_metadata_ro_entry {
    size_t index;    // position of this entry within its metadata buffer
    uint32_t tag;    // tag identifying the entry's meaning
    uint8_t type;    // one of the TYPE_* values above
    size_t count;    // number of elements of 'type' in data, not bytes
    // Read-only pointers into the buffer; layout is identical to
    // camera_metadata_entry.
    union {
        const uint8_t *u8;
        const int32_t *i32;
        const float *f;
        const int64_t *i64;
        const double *d;
        const camera_metadata_rational_t *r;
    } data;
} camera_metadata_ro_entry_t;
/**
* Size in bytes of each entry type
*/
ANDROID_API
extern const size_t camera_metadata_type_size[NUM_TYPES];
/**
* Human-readable name of each entry type
*/
ANDROID_API
extern const char* camera_metadata_type_names[NUM_TYPES];
/**
* Main definitions for the metadata entry and array structures
* =============================================================================
*/
/**
* A packet of metadata. This is a list of metadata entries, each of which has
* an integer tag to identify its meaning, 'type' and 'count' field, and the
* data, which contains a 'count' number of entries of type 'type'. The packet
* has a fixed capacity for entries and for extra data. A new entry uses up one
* entry slot, and possibly some amount of data capacity; the function
* calculate_camera_metadata_entry_data_size() provides the amount of data
* capacity that would be used up by an entry.
*
* Entries are not sorted by default, and are not forced to be unique - multiple
* entries with the same tag are allowed. The packet will not dynamically resize
* when full.
*
* The packet is contiguous in memory, with size in bytes given by
* get_camera_metadata_size(). Therefore, it can be copied safely with memcpy()
* to a buffer of sufficient size. The copy_camera_metadata() function is
* intended for eliminating unused capacity in the destination packet.
*/
struct camera_metadata;
typedef struct camera_metadata camera_metadata_t;
/**
* Functions for manipulating camera metadata
* =============================================================================
*
* NOTE: Unless otherwise specified, functions that return type "int"
* return 0 on success, and non-0 value on error.
*/
/**
* Allocate a new camera_metadata structure, with some initial space for entries
* and extra data. The entry_capacity is measured in entry counts, and
* data_capacity in bytes. The resulting structure is all contiguous in memory,
* and can be freed with free_camera_metadata().
*/
ANDROID_API
camera_metadata_t *allocate_camera_metadata(size_t entry_capacity,
size_t data_capacity);
/**
 * Get the required alignment of a packet of camera metadata, which is the
 * maximal alignment of the embedded camera_metadata, camera_metadata_buffer_entry,
 * and camera_metadata_data.
 *
 * Declared with (void): in C, empty parentheses declare a function with
 * unspecified arguments rather than a prototype, so mistaken calls with
 * arguments would not be diagnosed. (void) is equivalent when compiled as
 * C++ and strictly better as C; existing callers are unaffected.
 */
ANDROID_API
size_t get_camera_metadata_alignment(void);
/**
* Allocate a new camera_metadata structure of size src_size. Copy the data,
* ignoring alignment, and then attempt validation. If validation
* fails, free the memory and return NULL. Otherwise return the pointer.
*
* The resulting pointer can be freed with free_camera_metadata().
*/
ANDROID_API
camera_metadata_t *allocate_copy_camera_metadata_checked(
const camera_metadata_t *src,
size_t src_size);
/**
* Place a camera metadata structure into an existing buffer. Returns NULL if
* the buffer is too small for the requested number of reserved entries and
* bytes of data. The entry_capacity is measured in entry counts, and
* data_capacity in bytes. If the buffer is larger than the required space,
* unused space will be left at the end. If successful, returns a pointer to the
* metadata header placed at the start of the buffer. It is the caller's
* responsibility to free the original buffer; do not call
* free_camera_metadata() with the returned pointer.
*/
ANDROID_API
camera_metadata_t *place_camera_metadata(void *dst, size_t dst_size,
size_t entry_capacity,
size_t data_capacity);
/**
* Free a camera_metadata structure. Should only be used with structures
* allocated with allocate_camera_metadata().
*/
ANDROID_API
void free_camera_metadata(camera_metadata_t *metadata);
/**
* Calculate the buffer size needed for a metadata structure of entry_count
* metadata entries, needing a total of data_count bytes of extra data storage.
*/
ANDROID_API
size_t calculate_camera_metadata_size(size_t entry_count,
size_t data_count);
/**
* Get current size of entire metadata structure in bytes, including reserved
* but unused space.
*/
ANDROID_API
size_t get_camera_metadata_size(const camera_metadata_t *metadata);
/**
* Get size of entire metadata buffer in bytes, not including reserved but
* unused space. This is the amount of space needed by copy_camera_metadata for
* its dst buffer.
*/
ANDROID_API
size_t get_camera_metadata_compact_size(const camera_metadata_t *metadata);
/**
* Get the current number of entries in the metadata packet.
*
* metadata packet must be valid, which can be checked before the call with
* validate_camera_metadata_structure().
*/
ANDROID_API
size_t get_camera_metadata_entry_count(const camera_metadata_t *metadata);
/**
* Get the maximum number of entries that could fit in the metadata packet.
*/
ANDROID_API
size_t get_camera_metadata_entry_capacity(const camera_metadata_t *metadata);
/**
* Get the current count of bytes used for value storage in the metadata packet.
*/
ANDROID_API
size_t get_camera_metadata_data_count(const camera_metadata_t *metadata);
/**
* Get the maximum count of bytes that could be used for value storage in the
* metadata packet.
*/
ANDROID_API
size_t get_camera_metadata_data_capacity(const camera_metadata_t *metadata);
/**
* Copy a metadata structure to a memory buffer, compacting it along the
* way. That is, in the copied structure, entry_count == entry_capacity, and
* data_count == data_capacity.
*
* If dst_size > get_camera_metadata_compact_size(), the unused bytes are at the
* end of the buffer. If dst_size < get_camera_metadata_compact_size(), returns
* NULL. Otherwise returns a pointer to the metadata structure header placed at
* the start of dst.
*
* Since the buffer was not allocated by allocate_camera_metadata, the caller is
* responsible for freeing the underlying buffer when needed; do not call
* free_camera_metadata.
*/
ANDROID_API
camera_metadata_t *copy_camera_metadata(void *dst, size_t dst_size,
const camera_metadata_t *src);
// Non-zero return values for validate_camera_metadata_structure
enum {
    // Metadata is structurally invalid and must not be used.
    CAMERA_METADATA_VALIDATION_ERROR = 1,
    // Data is not properly aligned, but is otherwise usable as input to
    // clone_camera_metadata(), which returns a valid, aligned copy.
    CAMERA_METADATA_VALIDATION_SHIFTED = 2,
};
/**
* Validate that a metadata is structurally sane. That is, its internal
* state is such that we won't get buffer overflows or run into other
* 'impossible' issues when calling the other API functions.
*
* This is useful in particular after copying the binary metadata blob
* from an untrusted source, since passing this check means the data is at least
* consistent.
*
* The expected_size argument is optional.
*
* Returns 0: on success
* CAMERA_METADATA_VALIDATION_ERROR: on error
* CAMERA_METADATA_VALIDATION_SHIFTED: when the data is not properly aligned, but can be
* used as input of clone_camera_metadata and the returned metadata will be valid.
*
*/
ANDROID_API
int validate_camera_metadata_structure(const camera_metadata_t *metadata,
const size_t *expected_size);
/**
* Append camera metadata in src to an existing metadata structure in dst. This
* does not resize the destination structure, so if it is too small, a non-zero
* value is returned. On success, 0 is returned. Appending onto a sorted
* structure results in a non-sorted combined structure.
*/
ANDROID_API
int append_camera_metadata(camera_metadata_t *dst, const camera_metadata_t *src);
/**
* Clone an existing metadata buffer, compacting along the way. This is
* equivalent to allocating a new buffer of the minimum needed size, then
* appending the buffer to be cloned into the new buffer. The resulting buffer
* can be freed with free_camera_metadata(). Returns NULL if cloning failed.
*/
ANDROID_API
camera_metadata_t *clone_camera_metadata(const camera_metadata_t *src);
/**
* Calculate the number of bytes of extra data a given metadata entry will take
* up. That is, if entry of 'type' with a payload of 'data_count' values is
* added, how much will the value returned by get_camera_metadata_data_count()
* be increased? This value may be zero, if no extra data storage is needed.
*/
ANDROID_API
size_t calculate_camera_metadata_entry_data_size(uint8_t type,
size_t data_count);
/**
* Add a metadata entry to a metadata structure. Returns 0 if the addition
* succeeded. Returns a non-zero value if there is insufficient reserved space
* left to add the entry, or if the tag is unknown. data_count is the number of
* entries in the data array of the tag's type, not a count of
* bytes. Vendor-defined tags can not be added using this method, unless
* set_vendor_tag_query_ops() has been called first. Entries are always added to
* the end of the structure (highest index), so after addition, a
* previously-sorted array will be marked as unsorted.
*
* Returns 0 on success. A non-0 value is returned on error.
*/
ANDROID_API
int add_camera_metadata_entry(camera_metadata_t *dst,
uint32_t tag,
const void *data,
size_t data_count);
/**
* Sort the metadata buffer for fast searching. If already marked as sorted,
* does nothing. Adding or appending entries to the buffer will place the buffer
* back into an unsorted state.
*
* Returns 0 on success. A non-0 value is returned on error.
*/
ANDROID_API
int sort_camera_metadata(camera_metadata_t *dst);
/**
* Get metadata entry at position index in the metadata buffer.
* Index must be less than entry count, which is returned by
* get_camera_metadata_entry_count().
*
* src and index are inputs; the passed-in entry is updated with the details of
* the entry. The data pointer points to the real data in the buffer, and can be
* updated as long as the data count does not change.
*
* Returns 0 on success. A non-0 value is returned on error.
*/
ANDROID_API
int get_camera_metadata_entry(camera_metadata_t *src,
size_t index,
camera_metadata_entry_t *entry);
/**
* Get metadata entry at position index, but disallow editing the data.
*/
ANDROID_API
int get_camera_metadata_ro_entry(const camera_metadata_t *src,
size_t index,
camera_metadata_ro_entry_t *entry);
/**
* Find an entry with given tag value. If not found, returns -ENOENT. Otherwise,
* returns entry contents like get_camera_metadata_entry.
*
* If multiple entries with the same tag exist, does not have any guarantees on
* which is returned. To speed up searching for tags, sort the metadata
* structure first by calling sort_camera_metadata().
*/
ANDROID_API
int find_camera_metadata_entry(camera_metadata_t *src,
uint32_t tag,
camera_metadata_entry_t *entry);
/**
* Find an entry with given tag value, but disallow editing the data
*/
ANDROID_API
int find_camera_metadata_ro_entry(const camera_metadata_t *src,
uint32_t tag,
camera_metadata_ro_entry_t *entry);
/**
* Delete an entry at given index. This is an expensive operation, since it
* requires repacking entries and possibly entry data. This also invalidates any
* existing camera_metadata_entry.data pointers to this buffer. Sorting is
* maintained.
*/
ANDROID_API
int delete_camera_metadata_entry(camera_metadata_t *dst,
size_t index);
/**
* Updates a metadata entry with new data. If the data size is changing, may
* need to adjust the data array, making this an O(N) operation. If the data
* size is the same or still fits in the entry space, this is O(1). Maintains
* sorting, but invalidates camera_metadata_entry instances that point to the
* updated entry. If a non-NULL value is passed in to entry, the entry structure
* is updated to match the new buffer state. Returns a non-zero value if there
* is no room for the new data in the buffer.
*/
ANDROID_API
int update_camera_metadata_entry(camera_metadata_t *dst,
size_t index,
const void *data,
size_t data_count,
camera_metadata_entry_t *updated_entry);
/**
* Retrieve human-readable name of section the tag is in. Returns NULL if
* no such tag is defined. Returns NULL for tags in the vendor section, unless
* set_vendor_tag_query_ops() has been used.
*/
ANDROID_API
const char *get_camera_metadata_section_name(uint32_t tag);
/**
* Retrieve human-readable name of tag (not including section). Returns NULL if
* no such tag is defined. Returns NULL for tags in the vendor section, unless
* set_vendor_tag_query_ops() has been used.
*/
ANDROID_API
const char *get_camera_metadata_tag_name(uint32_t tag);
/**
* Retrieve the type of a tag. Returns -1 if no such tag is defined. Returns -1
* for tags in the vendor section, unless set_vendor_tag_query_ops() has been
* used.
*/
ANDROID_API
int get_camera_metadata_tag_type(uint32_t tag);
/**
* Retrieve human-readable name of section the tag is in. Returns NULL if
* no such tag is defined.
*/
ANDROID_API
const char *get_local_camera_metadata_section_name(uint32_t tag,
const camera_metadata_t *meta);
/**
* Retrieve human-readable name of tag (not including section). Returns NULL if
* no such tag is defined.
*/
ANDROID_API
const char *get_local_camera_metadata_tag_name(uint32_t tag,
const camera_metadata_t *meta);
/**
* Retrieve the type of a tag. Returns -1 if no such tag is defined.
*/
ANDROID_API
int get_local_camera_metadata_tag_type(uint32_t tag,
const camera_metadata_t *meta);
/**
* Set up vendor-specific tag query methods. These are needed to properly add
* entries with vendor-specified tags and to use the
* get_camera_metadata_section_name, _tag_name, and _tag_type methods with
* vendor tags. Returns 0 on success.
*
* **DEPRECATED** - Please use vendor_tag_ops defined in camera_vendor_tags.h
* instead.
*/
typedef struct vendor_tag_query_ops vendor_tag_query_ops_t;
/*
 * **DEPRECATED** (see note above): superseded by vendor_tag_ops in
 * camera_vendor_tags.h. These callbacks are only consulted for
 * vendor-defined tags, i.e. tags >= 0x80000000.
 */
struct vendor_tag_query_ops {
    /**
     * Get vendor section name for a vendor-specified entry tag. Only called for
     * tags >= 0x80000000. The section name must start with the name of the
     * vendor in the Java package style. For example, CameraZoom inc must prefix
     * their sections with "com.camerazoom." Must return NULL if the tag is
     * outside the bounds of vendor-defined sections.
     */
    const char *(*get_camera_vendor_section_name)(
            const vendor_tag_query_ops_t *v,
            uint32_t tag);
    /**
     * Get tag name for a vendor-specified entry tag. Only called for tags >=
     * 0x80000000. Must return NULL if the tag is outside the bounds of
     * vendor-defined sections.
     */
    const char *(*get_camera_vendor_tag_name)(
            const vendor_tag_query_ops_t *v,
            uint32_t tag);
    /**
     * Get tag type for a vendor-specified entry tag. Only called for tags >=
     * 0x80000000. Must return -1 if the tag is outside the bounds of
     * vendor-defined sections.
     */
    int (*get_camera_vendor_tag_type)(
            const vendor_tag_query_ops_t *v,
            uint32_t tag);
    /**
     * Get the number of vendor tags supported on this platform. Used to
     * calculate the size of buffer needed for holding the array of all tags
     * returned by get_camera_vendor_tags().
     */
    int (*get_camera_vendor_tag_count)(
            const vendor_tag_query_ops_t *v);
    /**
     * Fill an array with all the supported vendor tags on this platform.
     * get_camera_vendor_tag_count() returns the number of tags supported, and
     * tag_array should be allocated with enough space to hold all of the tags.
     */
    void (*get_camera_vendor_tags)(
            const vendor_tag_query_ops_t *v,
            uint32_t *tag_array);
};
/**
* **DEPRECATED** - This should only be used by the camera framework. Camera
* metadata will transition to using vendor_tag_ops defined in
* camera_vendor_tags.h instead.
*/
ANDROID_API
int set_camera_metadata_vendor_tag_ops(const vendor_tag_query_ops_t *query_ops);
/**
* Print fields in the metadata to the log.
* verbosity = 0: Only tag entry information
* verbosity = 1: Tag entry information plus at most 16 data values
* verbosity = 2: All information
*/
ANDROID_API
void dump_camera_metadata(const camera_metadata_t *metadata,
int fd,
int verbosity);
/**
* Print fields in the metadata to the log; adds indentation parameter, which
* specifies the number of spaces to insert before each line of the dump
*/
ANDROID_API
void dump_indented_camera_metadata(const camera_metadata_t *metadata,
int fd,
int verbosity,
int indentation);
/**
* Prints the specified tag value as a string. Only works for enum tags.
* Returns 0 on success, -1 on failure.
*/
ANDROID_API
int camera_metadata_enum_snprint(uint32_t tag,
uint32_t value,
char *dst,
size_t size);
#ifdef __cplusplus
}
#endif
#endif

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,159 @@
/* SPDX-License-Identifier: Apache-2.0 */
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef SYSTEM_MEDIA_INCLUDE_ANDROID_CAMERA_VENDOR_TAGS_H
#define SYSTEM_MEDIA_INCLUDE_ANDROID_CAMERA_VENDOR_TAGS_H
#ifdef __cplusplus
extern "C" {
#endif
#define CAMERA_METADATA_VENDOR_TAG_BOUNDARY 0x80000000u
#define CAMERA_METADATA_INVALID_VENDOR_ID UINT64_MAX
typedef uint64_t metadata_vendor_id_t;
/**
* Vendor tags:
*
* This structure contains basic functions for enumerating an immutable set of
* vendor-defined camera metadata tags, and querying static information about
* their structure/type. The intended use of this information is to validate
* the structure of metadata returned by the camera HAL, and to allow vendor-
* defined metadata tags to be visible in application facing camera API.
*/
typedef struct vendor_tag_ops vendor_tag_ops_t;
/*
 * Callback table used to enumerate and describe the platform's immutable set
 * of vendor-defined metadata tags (see the "Vendor tags" overview above).
 */
struct vendor_tag_ops {
    /**
     * Get the number of vendor tags supported on this platform. Used to
     * calculate the size of buffer needed for holding the array of all tags
     * returned by get_all_tags(). This must return -1 on error.
     */
    int (*get_tag_count)(const vendor_tag_ops_t *v);
    /**
     * Fill an array with all of the supported vendor tags on this platform.
     * get_tag_count() must return the number of tags supported, and
     * tag_array will be allocated with enough space to hold the number of tags
     * returned by get_tag_count().
     */
    void (*get_all_tags)(const vendor_tag_ops_t *v, uint32_t *tag_array);
    /**
     * Get the vendor section name for a vendor-specified entry tag. This will
     * only be called for vendor-defined tags.
     *
     * The naming convention for the vendor-specific section names should
     * follow a style similar to the Java package style. For example,
     * CameraZoom Inc. must prefix their sections with "com.camerazoom."
     * This must return NULL if the tag is outside the bounds of
     * vendor-defined sections.
     *
     * There may be different vendor-defined tag sections, for example the
     * phone maker, the chipset maker, and the camera module maker may each
     * have their own "com.vendor."-prefixed section.
     *
     * The memory pointed to by the return value must remain valid for the
     * lifetime of the module, and is owned by the module.
     */
    const char *(*get_section_name)(const vendor_tag_ops_t *v, uint32_t tag);
    /**
     * Get the tag name for a vendor-specified entry tag. This is only called
     * for vendor-defined tags, and must return NULL if it is not a
     * vendor-defined tag.
     *
     * The memory pointed to by the return value must remain valid for the
     * lifetime of the module, and is owned by the module.
     */
    const char *(*get_tag_name)(const vendor_tag_ops_t *v, uint32_t tag);
    /**
     * Get tag type for a vendor-specified entry tag. The type returned must be
     * a valid type defined in camera_metadata.h. This method is only called
     * for tags >= CAMERA_METADATA_VENDOR_TAG_BOUNDARY, and must return
     * -1 if the tag is outside the bounds of the vendor-defined sections.
     */
    int (*get_tag_type)(const vendor_tag_ops_t *v, uint32_t tag);
    /* Reserved for future use. These must be initialized to NULL. */
    void* reserved[8];
};
/*
 * Variant of vendor_tag_ops in which every query is keyed by a
 * metadata_vendor_id_t instead of an ops pointer — presumably so tag
 * information from multiple vendor-tag providers can be looked up through a
 * single cache (NOTE(review): confirm against the framework's usage).
 * The per-callback contracts mirror vendor_tag_ops.
 */
struct vendor_tag_cache_ops {
    /**
     * Get the number of vendor tags supported on this platform. Used to
     * calculate the size of buffer needed for holding the array of all tags
     * returned by get_all_tags(). This must return -1 on error.
     */
    int (*get_tag_count)(metadata_vendor_id_t id);
    /**
     * Fill an array with all of the supported vendor tags on this platform.
     * get_tag_count() must return the number of tags supported, and
     * tag_array will be allocated with enough space to hold the number of tags
     * returned by get_tag_count().
     */
    void (*get_all_tags)(uint32_t *tag_array, metadata_vendor_id_t id);
    /**
     * Get the vendor section name for a vendor-specified entry tag. This will
     * only be called for vendor-defined tags.
     *
     * The naming convention for the vendor-specific section names should
     * follow a style similar to the Java package style. For example,
     * CameraZoom Inc. must prefix their sections with "com.camerazoom."
     * This must return NULL if the tag is outside the bounds of
     * vendor-defined sections.
     *
     * There may be different vendor-defined tag sections, for example the
     * phone maker, the chipset maker, and the camera module maker may each
     * have their own "com.vendor."-prefixed section.
     *
     * The memory pointed to by the return value must remain valid for the
     * lifetime of the module, and is owned by the module.
     */
    const char *(*get_section_name)(uint32_t tag, metadata_vendor_id_t id);
    /**
     * Get the tag name for a vendor-specified entry tag. This is only called
     * for vendor-defined tags, and must return NULL if it is not a
     * vendor-defined tag.
     *
     * The memory pointed to by the return value must remain valid for the
     * lifetime of the module, and is owned by the module.
     */
    const char *(*get_tag_name)(uint32_t tag, metadata_vendor_id_t id);
    /**
     * Get tag type for a vendor-specified entry tag. The type returned must be
     * a valid type defined in camera_metadata.h. This method is only called
     * for tags >= CAMERA_METADATA_VENDOR_TAG_BOUNDARY, and must return
     * -1 if the tag is outside the bounds of the vendor-defined sections.
     */
    int (*get_tag_type)(uint32_t tag, metadata_vendor_id_t id);
    /* Reserved for future use. These must be initialized to NULL. */
    void* reserved[8];
};
#ifdef __cplusplus
} /* extern "C" */
#endif
#endif /* SYSTEM_MEDIA_INCLUDE_ANDROID_CAMERA_VENDOR_TAGS_H */

View File

@ -0,0 +1,145 @@
/* SPDX-License-Identifier: Apache-2.0 */
/*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef _ANDROID_LOG_H
#define _ANDROID_LOG_H
/******************************************************************
*
* IMPORTANT NOTICE:
*
* This file is part of Android's set of stable system headers
* exposed by the Android NDK (Native Development Kit) since
* platform release 1.5
*
* Third-party source AND binary code relies on the definitions
* here to be FROZEN ON ALL UPCOMING PLATFORM RELEASES.
*
* - DO NOT MODIFY ENUMS (EXCEPT IF YOU ADD NEW 32-BIT VALUES)
* - DO NOT MODIFY CONSTANTS OR FUNCTIONAL MACROS
* - DO NOT CHANGE THE SIGNATURE OF FUNCTIONS IN ANY WAY
* - DO NOT CHANGE THE LAYOUT OR SIZE OF STRUCTURES
*/
/*
* Support routines to send messages to the Android in-kernel log buffer,
* which can later be accessed through the 'logcat' utility.
*
* Each log message must have
* - a priority
* - a log tag
* - some text
*
* The tag normally corresponds to the component that emits the log message,
* and should be reasonably small.
*
* Log message text may be truncated to less than an implementation-specific
* limit (e.g. 1023 characters max).
*
* Note that a newline character ("\n") will be appended automatically to your
* log message, if not already there. It is not possible to send several messages
* and have them appear on a single line in logcat.
*
* PLEASE USE LOGS WITH MODERATION:
*
* - Sending log messages eats CPU and slow down your application and the
* system.
*
* - The circular log buffer is pretty small (<64KB), sending many messages
* might push off other important log messages from the rest of the system.
*
* - In release builds, only send log messages to account for exceptional
* conditions.
*
* NOTE: These functions MUST be implemented by /system/lib/liblog.so
*/
#include <stdarg.h>
#ifdef __cplusplus
extern "C" {
#endif
/*
 * Android log priority values, in ascending priority order.
 * VERBOSE through FATAL are message priorities; UNKNOWN, DEFAULT and
 * SILENT are control values (DEFAULT and SILENT are marked below as
 * SetMinPriority()-only).
 */
typedef enum android_LogPriority {
    ANDROID_LOG_UNKNOWN = 0,
    ANDROID_LOG_DEFAULT,    /* only for SetMinPriority() */
    ANDROID_LOG_VERBOSE,
    ANDROID_LOG_DEBUG,
    ANDROID_LOG_INFO,
    ANDROID_LOG_WARN,
    ANDROID_LOG_ERROR,
    ANDROID_LOG_FATAL,
    ANDROID_LOG_SILENT, /* only for SetMinPriority(); must be last */
} android_LogPriority;
/*
* Send a simple string to the log.
*/
int __android_log_write(int prio, const char *tag, const char *text);
/*
* Send a formatted string to the log, used like printf(fmt,...)
*/
int __android_log_print(int prio, const char *tag, const char *fmt, ...)
#if defined(__GNUC__)
#ifdef __USE_MINGW_ANSI_STDIO
#if __USE_MINGW_ANSI_STDIO
__attribute__ ((format(gnu_printf, 3, 4)))
#else
__attribute__ ((format(printf, 3, 4)))
#endif
#else
__attribute__ ((format(printf, 3, 4)))
#endif
#endif
;
/*
* A variant of __android_log_print() that takes a va_list to list
* additional parameters.
*/
int __android_log_vprint(int prio, const char *tag,
const char *fmt, va_list ap);
/*
* Log an assertion failure and abort the process to have a chance
* to inspect it if a debugger is attached. This uses the FATAL priority.
*/
void __android_log_assert(const char *cond, const char *tag,
const char *fmt, ...)
#if defined(__GNUC__)
__attribute__ ((noreturn))
#ifdef __USE_MINGW_ANSI_STDIO
#if __USE_MINGW_ANSI_STDIO
__attribute__ ((format(gnu_printf, 3, 4)))
#else
__attribute__ ((format(printf, 3, 4)))
#endif
#else
__attribute__ ((format(printf, 3, 4)))
#endif
#endif
;
#ifdef __cplusplus
}
#endif
#endif /* _ANDROID_LOG_H */

View File

@ -0,0 +1,45 @@
/* SPDX-License-Identifier: Apache-2.0 */
/*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANDROID_CUTILS_COMPILER_H
#define ANDROID_CUTILS_COMPILER_H
/*
* helps the compiler's optimizer predicting branches
*/
#ifdef __cplusplus
# define CC_LIKELY( exp ) (__builtin_expect( !!(exp), true ))
# define CC_UNLIKELY( exp ) (__builtin_expect( !!(exp), false ))
#else
# define CC_LIKELY( exp ) (__builtin_expect( !!(exp), 1 ))
# define CC_UNLIKELY( exp ) (__builtin_expect( !!(exp), 0 ))
#endif
/**
* exports marked symbols
*
* if used on a C++ class declaration, this macro must be inserted
* after the "class" keyword. For instance:
*
* template <typename TYPE>
* class ANDROID_API Singleton { }
*/
#define ANDROID_API __attribute__((visibility("default")))
#endif // ANDROID_CUTILS_COMPILER_H

View File

@ -0,0 +1,106 @@
/* SPDX-License-Identifier: Apache-2.0 */
/*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef NATIVE_HANDLE_H_
#define NATIVE_HANDLE_H_
#include <stdalign.h>
#ifdef __cplusplus
extern "C" {
#endif
#define NATIVE_HANDLE_MAX_FDS 1024
#define NATIVE_HANDLE_MAX_INTS 1024
/* Declare a char array for use with native_handle_init: sized to hold the
 * native_handle_t header plus (maxFds + maxInts) ints of trailing data, and
 * aligned so the header can be placed at its start. */
#define NATIVE_HANDLE_DECLARE_STORAGE(name, maxFds, maxInts) \
    alignas(native_handle_t) char (name)[ \
            sizeof(native_handle_t) + sizeof(int) * ((maxFds) + (maxInts))]
typedef struct native_handle
{
    int version;        /* sizeof(native_handle_t) */
    int numFds;         /* number of file-descriptors at &data[0] */
    int numInts;        /* number of ints at &data[numFds] */
#if defined(__clang__)
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wzero-length-array"
#endif
    /* zero-length trailing array: fds first, then ints (numFds + numInts total) */
    int data[0];
#if defined(__clang__)
#pragma clang diagnostic pop
#endif
} native_handle_t;
/* Read-only view of a native_handle_t, used when passing buffer handles. */
typedef const native_handle_t* buffer_handle_t;
/*
* native_handle_close
*
* closes the file descriptors contained in this native_handle_t
*
* return 0 on success, or a negative error code on failure
*
*/
int native_handle_close(const native_handle_t* h);
/*
* native_handle_init
*
* Initializes a native_handle_t from storage. storage must be declared with
* NATIVE_HANDLE_DECLARE_STORAGE. numFds and numInts must not respectively
* exceed maxFds and maxInts used to declare the storage.
*/
native_handle_t* native_handle_init(char* storage, int numFds, int numInts);
/*
* native_handle_create
*
* creates a native_handle_t and initializes it. must be destroyed with
* native_handle_delete().
*
*/
native_handle_t* native_handle_create(int numFds, int numInts);
/*
* native_handle_clone
*
* creates a native_handle_t and initializes it from another native_handle_t.
* Must be destroyed with native_handle_delete().
*
*/
native_handle_t* native_handle_clone(const native_handle_t* handle);
/*
* native_handle_delete
*
* frees a native_handle_t allocated with native_handle_create().
* This ONLY frees the memory allocated for the native_handle_t, but doesn't
* close the file descriptors; which can be achieved with native_handle_close().
*
* return 0 on success, or a negative error code on failure
*
*/
int native_handle_delete(native_handle_t* h);
#ifdef __cplusplus
}
#endif
#endif /* NATIVE_HANDLE_H_ */

View File

@ -0,0 +1,308 @@
/* SPDX-License-Identifier: Apache-2.0 */
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef SYSTEM_CORE_INCLUDE_ANDROID_CAMERA_H
#define SYSTEM_CORE_INCLUDE_ANDROID_CAMERA_H
#include <stdint.h>
#include <sys/cdefs.h>
#include <sys/types.h>
#include <cutils/native_handle.h>
#include <hardware/hardware.h>
#include <hardware/gralloc.h>
__BEGIN_DECLS
/**
* A set of bit masks for specifying how the received preview frames are
* handled before the previewCallback() call.
*
* The least significant 3 bits of an "int" value are used for this purpose:
*
* ..... 0 0 0
* ^ ^ ^
* | | |---------> determine whether the callback is enabled or not
* | |-----------> determine whether the callback is one-shot or not
* |-------------> determine whether the frame is copied out or not
*
* WARNING: When a frame is sent directly without copying, it is the frame
* receiver's responsiblity to make sure that the frame data won't get
* corrupted by subsequent preview frames filled by the camera. This flag is
* recommended only when copying out data brings significant performance price
* and the handling/processing of the received frame data is always faster than
* the preview frame rate so that data corruption won't occur.
*
* For instance,
* 1. 0x00 disables the callback. In this case, copy out and one shot bits
* are ignored.
* 2. 0x01 enables a callback without copying out the received frames. A
* typical use case is the Camcorder application to avoid making costly
* frame copies.
* 3. 0x05 is enabling a callback with frame copied out repeatedly. A typical
* use case is the Camera application.
* 4. 0x07 is enabling a callback with frame copied out only once. A typical
* use case is the Barcode scanner application.
*/
enum {
    CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK = 0x01,      /* bit 0: callback enabled */
    CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK = 0x02,    /* bit 1: deliver only one frame */
    CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK = 0x04,    /* bit 2: copy the frame data out */
    /** Typical use cases (combinations of the masks above) */
    CAMERA_FRAME_CALLBACK_FLAG_NOOP = 0x00,             /* callback disabled */
    CAMERA_FRAME_CALLBACK_FLAG_CAMCORDER = 0x01,        /* enabled, no copy (avoids costly frame copies) */
    CAMERA_FRAME_CALLBACK_FLAG_CAMERA = 0x05,           /* enabled, copy out, repeated */
    CAMERA_FRAME_CALLBACK_FLAG_BARCODE_SCANNER = 0x07   /* enabled, copy out, one-shot */
};
/** msgType in notifyCallback and dataCallback functions.
 * Each message type occupies a distinct bit, so types can be OR'd together
 * (CAMERA_MSG_ALL_MSGS covers every bit). */
enum {
    CAMERA_MSG_ERROR = 0x0001,            // notifyCallback
    CAMERA_MSG_SHUTTER = 0x0002,          // notifyCallback
    CAMERA_MSG_FOCUS = 0x0004,            // notifyCallback
    CAMERA_MSG_ZOOM = 0x0008,             // notifyCallback
    CAMERA_MSG_PREVIEW_FRAME = 0x0010,    // dataCallback
    CAMERA_MSG_VIDEO_FRAME = 0x0020,      // data_timestamp_callback
    CAMERA_MSG_POSTVIEW_FRAME = 0x0040,   // dataCallback
    CAMERA_MSG_RAW_IMAGE = 0x0080,        // dataCallback
    CAMERA_MSG_COMPRESSED_IMAGE = 0x0100, // dataCallback
    CAMERA_MSG_RAW_IMAGE_NOTIFY = 0x0200, // dataCallback
    // Preview frame metadata. This can be combined with
    // CAMERA_MSG_PREVIEW_FRAME in dataCallback. For example, the apps can
    // request FRAME and METADATA. Or the apps can request only FRAME or only
    // METADATA.
    CAMERA_MSG_PREVIEW_METADATA = 0x0400, // dataCallback
    // Notify on autofocus start and stop. This is useful in continuous
    // autofocus - FOCUS_MODE_CONTINUOUS_VIDEO and FOCUS_MODE_CONTINUOUS_PICTURE.
    CAMERA_MSG_FOCUS_MOVE = 0x0800, // notifyCallback
    CAMERA_MSG_ALL_MSGS = 0xFFFF
};
/** cmdType in sendCommand functions */
/* Command identifiers are sequential IDs (not bit flags); values are part of
 * the HAL ABI and must never be renumbered. */
enum {
    CAMERA_CMD_START_SMOOTH_ZOOM = 1,
    CAMERA_CMD_STOP_SMOOTH_ZOOM = 2,
    /**
     * Set the clockwise rotation of preview display (setPreviewDisplay) in
     * degrees. This affects the preview frames and the picture displayed after
     * snapshot. This method is useful for portrait mode applications. Note
     * that preview display of front-facing cameras is flipped horizontally
     * before the rotation, that is, the image is reflected along the central
     * vertical axis of the camera sensor. So the users can see themselves as
     * looking into a mirror.
     *
     * This does not affect the order of byte array of
     * CAMERA_MSG_PREVIEW_FRAME, CAMERA_MSG_VIDEO_FRAME,
     * CAMERA_MSG_POSTVIEW_FRAME, CAMERA_MSG_RAW_IMAGE, or
     * CAMERA_MSG_COMPRESSED_IMAGE. This is allowed to be set during preview
     * since API level 14.
     */
    CAMERA_CMD_SET_DISPLAY_ORIENTATION = 3,
    /**
     * cmdType to disable/enable shutter sound. In sendCommand passing arg1 =
     * 0 will disable, while passing arg1 = 1 will enable the shutter sound.
     */
    CAMERA_CMD_ENABLE_SHUTTER_SOUND = 4,
    /* cmdType to play recording sound */
    CAMERA_CMD_PLAY_RECORDING_SOUND = 5,
    /**
     * Start the face detection. This should be called after preview is started.
     * The camera will notify the listener of CAMERA_MSG_FACE and the detected
     * faces in the preview frame. The detected faces may be the same as the
     * previous ones. Apps should call CAMERA_CMD_STOP_FACE_DETECTION to stop
     * the face detection. This method is supported if CameraParameters
     * KEY_MAX_NUM_HW_DETECTED_FACES or KEY_MAX_NUM_SW_DETECTED_FACES is
     * bigger than 0. Hardware and software face detection should not be running
     * at the same time. If the face detection has started, apps should not send
     * this again.
     *
     * In hardware face detection mode, CameraParameters KEY_WHITE_BALANCE,
     * KEY_FOCUS_AREAS and KEY_METERING_AREAS have no effect.
     *
     * arg1 is the face detection type. It can be CAMERA_FACE_DETECTION_HW or
     * CAMERA_FACE_DETECTION_SW. If the type of face detection requested is not
     * supported, the HAL must return BAD_VALUE.
     */
    CAMERA_CMD_START_FACE_DETECTION = 6,
    /**
     * Stop the face detection.
     */
    CAMERA_CMD_STOP_FACE_DETECTION = 7,
    /**
     * Enable/disable focus move callback (CAMERA_MSG_FOCUS_MOVE). Passing
     * arg1 = 0 will disable, while passing arg1 = 1 will enable the callback.
     */
    CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG = 8,
    /**
     * Ping camera service to see if camera hardware is released.
     *
     * When any camera method returns error, the client can use ping command
     * to see if the camera has been taken away by other clients. If the result
     * is NO_ERROR, it means the camera hardware is not released. If the result
     * is not NO_ERROR, the camera has been released and the existing client
     * can silently finish itself or show a dialog.
     */
    CAMERA_CMD_PING = 9,
    /**
     * Configure the number of video buffers used for recording. The intended
     * video buffer count for recording is passed as arg1, which must be
     * greater than 0. This command must be sent before recording is started.
     * This command returns INVALID_OPERATION error if it is sent after video
     * recording is started, or the command is not supported at all. This
     * command also returns a BAD_VALUE error if the intended video buffer
     * count is non-positive or too big to be realized.
     */
    CAMERA_CMD_SET_VIDEO_BUFFER_COUNT = 10,
    /**
     * Configure an explicit format to use for video recording metadata mode.
     * This can be used to switch the format from the
     * default IMPLEMENTATION_DEFINED gralloc format to some other
     * device-supported format, and the default dataspace from the BT_709 color
     * space to some other device-supported dataspace. arg1 is the HAL pixel
     * format, and arg2 is the HAL dataSpace. This command returns
     * INVALID_OPERATION error if it is sent after video recording is started,
     * or the command is not supported at all.
     *
     * If the gralloc format is set to a format other than
     * IMPLEMENTATION_DEFINED, then HALv3 devices will use gralloc usage flags
     * of SW_READ_OFTEN.
     */
    CAMERA_CMD_SET_VIDEO_FORMAT = 11
};
/** camera fatal errors */
/* Fatal error codes delivered through notifyCallback with CAMERA_MSG_ERROR. */
enum {
    CAMERA_ERROR_UNKNOWN = 1,
    /**
     * Camera was released because another client has connected to the camera.
     * The original client should call Camera::disconnect immediately after
     * getting this notification. Otherwise, the camera will be released by
     * camera service in a short time. The client should not call any method
     * (except disconnect and sending CAMERA_CMD_PING) after getting this.
     */
    CAMERA_ERROR_RELEASED = 2,
    /**
     * Camera was released because device policy change or the client application
     * is going to background. The client should call Camera::disconnect
     * immediately after getting this notification. Otherwise, the camera will be
     * released by camera service in a short time. The client should not call any
     * method (except disconnect and sending CAMERA_CMD_PING) after getting this.
     */
    CAMERA_ERROR_DISABLED = 3,
    /* Camera service process itself died; all client state is lost. */
    CAMERA_ERROR_SERVER_DIED = 100
};
/* Physical orientation of the camera module relative to the device screen. */
enum {
    /** The facing of the camera is opposite to that of the screen. */
    CAMERA_FACING_BACK = 0,
    /** The facing of the camera is the same as that of the screen. */
    CAMERA_FACING_FRONT = 1,
    /**
     * The facing of the camera is not fixed relative to the screen.
     * The cameras with this facing are external cameras, e.g. USB cameras.
     */
    CAMERA_FACING_EXTERNAL = 2
};
/* Face detection backends selectable via CAMERA_CMD_START_FACE_DETECTION
 * (passed as arg1 of sendCommand). */
enum {
    /** Hardware face detection. It does not use much CPU. */
    CAMERA_FACE_DETECTION_HW = 0,
    /**
     * Software face detection. It uses some CPU. Applications must use
     * Camera.setPreviewTexture for preview in this mode.
     */
    CAMERA_FACE_DETECTION_SW = 1
};
/**
* The information of a face from camera face detection.
*/
/* A single detected face reported with preview-frame metadata. */
typedef struct camera_face {
    /**
     * Bounds of the face [left, top, right, bottom]. (-1000, -1000) represents
     * the top-left of the camera field of view, and (1000, 1000) represents the
     * bottom-right of the field of view. The width and height cannot be 0 or
     * negative. This is supported by both hardware and software face detection.
     *
     * The direction is relative to the sensor orientation, that is, what the
     * sensor sees. The direction is not affected by the rotation or mirroring
     * of CAMERA_CMD_SET_DISPLAY_ORIENTATION.
     */
    int32_t rect[4];
    /**
     * The confidence level of the face. The range is 1 to 100. 100 is the
     * highest confidence. This is supported by both hardware and software
     * face detection.
     */
    int32_t score;
    /**
     * An unique id per face while the face is visible to the tracker. If
     * the face leaves the field-of-view and comes back, it will get a new
     * id. If the value is 0, id is not supported.
     */
    int32_t id;
    /**
     * The coordinates of the center of the left eye. The range is -1000 to
     * 1000. -2000, -2000 if this is not supported.
     */
    int32_t left_eye[2];
    /**
     * The coordinates of the center of the right eye. The range is -1000 to
     * 1000. -2000, -2000 if this is not supported.
     */
    int32_t right_eye[2];
    /**
     * The coordinates of the center of the mouth. The range is -1000 to 1000.
     * -2000, -2000 if this is not supported.
     */
    int32_t mouth[2];
} camera_face_t;
/**
* The metadata of the frame data.
*/
/* Container attached to a preview frame (CAMERA_MSG_PREVIEW_METADATA). */
typedef struct camera_frame_metadata {
    /**
     * The number of detected faces in the frame.
     */
    int32_t number_of_faces;
    /**
     * An array of the detected faces. The length is number_of_faces.
     * NOTE(review): ownership/lifetime of this array is not defined in this
     * header — presumably valid only for the duration of the callback.
     */
    camera_face_t *faces;
} camera_frame_metadata_t;
__END_DECLS
#endif /* SYSTEM_CORE_INCLUDE_ANDROID_CAMERA_H */

View File

@ -0,0 +1,141 @@
/* SPDX-License-Identifier: Apache-2.0 */
// This file is autogenerated by hidl-gen. Do not edit manually.
// Source: android.hardware.graphics.common@1.0
// Location: hardware/interfaces/graphics/common/1.0/
#ifndef HIDL_GENERATED_ANDROID_HARDWARE_GRAPHICS_COMMON_V1_0_EXPORTED_CONSTANTS_H_
#define HIDL_GENERATED_ANDROID_HARDWARE_GRAPHICS_COMMON_V1_0_EXPORTED_CONSTANTS_H_
#ifdef __cplusplus
extern "C" {
#endif
/* Pixel formats exported from android.hardware.graphics.common@1.0.
 * NOTE(review): the large Y8/Y16/YV12 values look like fourcc codes
 * (e.g. 842094169 == 0x32315659 == 'YV12') — confirm against upstream. */
typedef enum {
    HAL_PIXEL_FORMAT_RGBA_8888 = 1,
    HAL_PIXEL_FORMAT_RGBX_8888 = 2,
    HAL_PIXEL_FORMAT_RGB_888 = 3,
    HAL_PIXEL_FORMAT_RGB_565 = 4,
    HAL_PIXEL_FORMAT_BGRA_8888 = 5,
    HAL_PIXEL_FORMAT_YCBCR_422_SP = 16,
    HAL_PIXEL_FORMAT_YCRCB_420_SP = 17,
    HAL_PIXEL_FORMAT_YCBCR_422_I = 20,
    HAL_PIXEL_FORMAT_RGBA_FP16 = 22,
    HAL_PIXEL_FORMAT_RAW16 = 32,
    HAL_PIXEL_FORMAT_BLOB = 33,
    HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED = 34,
    HAL_PIXEL_FORMAT_YCBCR_420_888 = 35,
    HAL_PIXEL_FORMAT_RAW_OPAQUE = 36,
    HAL_PIXEL_FORMAT_RAW10 = 37,
    HAL_PIXEL_FORMAT_RAW12 = 38,
    HAL_PIXEL_FORMAT_RGBA_1010102 = 43,
    HAL_PIXEL_FORMAT_Y8 = 538982489,
    HAL_PIXEL_FORMAT_Y16 = 540422489,
    HAL_PIXEL_FORMAT_YV12 = 842094169,
} android_pixel_format_t;
/* Buffer transforms; flips and rotations compose as the bit comments show. */
typedef enum {
    HAL_TRANSFORM_FLIP_H = 1, // (1 << 0)
    HAL_TRANSFORM_FLIP_V = 2, // (1 << 1)
    HAL_TRANSFORM_ROT_90 = 4, // (1 << 2)
    HAL_TRANSFORM_ROT_180 = 3, // (FLIP_H | FLIP_V)
    HAL_TRANSFORM_ROT_270 = 7, // ((FLIP_H | FLIP_V) | ROT_90)
} android_transform_t;
/* Dataspace encodes three packed bit fields — STANDARD (color primaries),
 * TRANSFER (OETF/EOTF) and RANGE (full/limited) — plus a set of legacy
 * pre-composed values. The trailing comments give the field composition. */
typedef enum {
    HAL_DATASPACE_UNKNOWN = 0,
    HAL_DATASPACE_ARBITRARY = 1,
    HAL_DATASPACE_STANDARD_SHIFT = 16,
    HAL_DATASPACE_STANDARD_MASK = 4128768, // (63 << STANDARD_SHIFT)
    HAL_DATASPACE_STANDARD_UNSPECIFIED = 0, // (0 << STANDARD_SHIFT)
    HAL_DATASPACE_STANDARD_BT709 = 65536, // (1 << STANDARD_SHIFT)
    HAL_DATASPACE_STANDARD_BT601_625 = 131072, // (2 << STANDARD_SHIFT)
    HAL_DATASPACE_STANDARD_BT601_625_UNADJUSTED = 196608, // (3 << STANDARD_SHIFT)
    HAL_DATASPACE_STANDARD_BT601_525 = 262144, // (4 << STANDARD_SHIFT)
    HAL_DATASPACE_STANDARD_BT601_525_UNADJUSTED = 327680, // (5 << STANDARD_SHIFT)
    HAL_DATASPACE_STANDARD_BT2020 = 393216, // (6 << STANDARD_SHIFT)
    HAL_DATASPACE_STANDARD_BT2020_CONSTANT_LUMINANCE = 458752, // (7 << STANDARD_SHIFT)
    HAL_DATASPACE_STANDARD_BT470M = 524288, // (8 << STANDARD_SHIFT)
    HAL_DATASPACE_STANDARD_FILM = 589824, // (9 << STANDARD_SHIFT)
    HAL_DATASPACE_STANDARD_DCI_P3 = 655360, // (10 << STANDARD_SHIFT)
    HAL_DATASPACE_STANDARD_ADOBE_RGB = 720896, // (11 << STANDARD_SHIFT)
    HAL_DATASPACE_TRANSFER_SHIFT = 22,
    HAL_DATASPACE_TRANSFER_MASK = 130023424, // (31 << TRANSFER_SHIFT)
    HAL_DATASPACE_TRANSFER_UNSPECIFIED = 0, // (0 << TRANSFER_SHIFT)
    HAL_DATASPACE_TRANSFER_LINEAR = 4194304, // (1 << TRANSFER_SHIFT)
    HAL_DATASPACE_TRANSFER_SRGB = 8388608, // (2 << TRANSFER_SHIFT)
    HAL_DATASPACE_TRANSFER_SMPTE_170M = 12582912, // (3 << TRANSFER_SHIFT)
    HAL_DATASPACE_TRANSFER_GAMMA2_2 = 16777216, // (4 << TRANSFER_SHIFT)
    HAL_DATASPACE_TRANSFER_GAMMA2_6 = 20971520, // (5 << TRANSFER_SHIFT)
    HAL_DATASPACE_TRANSFER_GAMMA2_8 = 25165824, // (6 << TRANSFER_SHIFT)
    HAL_DATASPACE_TRANSFER_ST2084 = 29360128, // (7 << TRANSFER_SHIFT)
    HAL_DATASPACE_TRANSFER_HLG = 33554432, // (8 << TRANSFER_SHIFT)
    HAL_DATASPACE_RANGE_SHIFT = 27,
    HAL_DATASPACE_RANGE_MASK = 939524096, // (7 << RANGE_SHIFT)
    HAL_DATASPACE_RANGE_UNSPECIFIED = 0, // (0 << RANGE_SHIFT)
    HAL_DATASPACE_RANGE_FULL = 134217728, // (1 << RANGE_SHIFT)
    HAL_DATASPACE_RANGE_LIMITED = 268435456, // (2 << RANGE_SHIFT)
    HAL_DATASPACE_RANGE_EXTENDED = 402653184, // (3 << RANGE_SHIFT)
    /* Legacy (deprecated) small values and their V0 field-composed forms. */
    HAL_DATASPACE_SRGB_LINEAR = 512,
    HAL_DATASPACE_V0_SRGB_LINEAR = 138477568, // ((STANDARD_BT709 | TRANSFER_LINEAR) | RANGE_FULL)
    HAL_DATASPACE_V0_SCRGB_LINEAR =
        406913024, // ((STANDARD_BT709 | TRANSFER_LINEAR) | RANGE_EXTENDED)
    HAL_DATASPACE_SRGB = 513,
    HAL_DATASPACE_V0_SRGB = 142671872, // ((STANDARD_BT709 | TRANSFER_SRGB) | RANGE_FULL)
    HAL_DATASPACE_V0_SCRGB = 411107328, // ((STANDARD_BT709 | TRANSFER_SRGB) | RANGE_EXTENDED)
    HAL_DATASPACE_JFIF = 257,
    HAL_DATASPACE_V0_JFIF = 146931712, // ((STANDARD_BT601_625 | TRANSFER_SMPTE_170M) | RANGE_FULL)
    HAL_DATASPACE_BT601_625 = 258,
    HAL_DATASPACE_V0_BT601_625 =
        281149440, // ((STANDARD_BT601_625 | TRANSFER_SMPTE_170M) | RANGE_LIMITED)
    HAL_DATASPACE_BT601_525 = 259,
    HAL_DATASPACE_V0_BT601_525 =
        281280512, // ((STANDARD_BT601_525 | TRANSFER_SMPTE_170M) | RANGE_LIMITED)
    HAL_DATASPACE_BT709 = 260,
    HAL_DATASPACE_V0_BT709 = 281083904, // ((STANDARD_BT709 | TRANSFER_SMPTE_170M) | RANGE_LIMITED)
    HAL_DATASPACE_DCI_P3_LINEAR = 139067392, // ((STANDARD_DCI_P3 | TRANSFER_LINEAR) | RANGE_FULL)
    HAL_DATASPACE_DCI_P3 = 155844608, // ((STANDARD_DCI_P3 | TRANSFER_GAMMA2_6) | RANGE_FULL)
    HAL_DATASPACE_DISPLAY_P3_LINEAR =
        139067392, // ((STANDARD_DCI_P3 | TRANSFER_LINEAR) | RANGE_FULL)
    HAL_DATASPACE_DISPLAY_P3 = 143261696, // ((STANDARD_DCI_P3 | TRANSFER_SRGB) | RANGE_FULL)
    HAL_DATASPACE_ADOBE_RGB = 151715840, // ((STANDARD_ADOBE_RGB | TRANSFER_GAMMA2_2) | RANGE_FULL)
    HAL_DATASPACE_BT2020_LINEAR = 138805248, // ((STANDARD_BT2020 | TRANSFER_LINEAR) | RANGE_FULL)
    HAL_DATASPACE_BT2020 = 147193856, // ((STANDARD_BT2020 | TRANSFER_SMPTE_170M) | RANGE_FULL)
    HAL_DATASPACE_BT2020_PQ = 163971072, // ((STANDARD_BT2020 | TRANSFER_ST2084) | RANGE_FULL)
    /* Non-color buffers. */
    HAL_DATASPACE_DEPTH = 4096,
    HAL_DATASPACE_SENSOR = 4097,
} android_dataspace_t;
/* Display color modes selectable by the compositor/HWC. */
typedef enum {
    HAL_COLOR_MODE_NATIVE = 0,
    HAL_COLOR_MODE_STANDARD_BT601_625 = 1,
    HAL_COLOR_MODE_STANDARD_BT601_625_UNADJUSTED = 2,
    HAL_COLOR_MODE_STANDARD_BT601_525 = 3,
    HAL_COLOR_MODE_STANDARD_BT601_525_UNADJUSTED = 4,
    HAL_COLOR_MODE_STANDARD_BT709 = 5,
    HAL_COLOR_MODE_DCI_P3 = 6,
    HAL_COLOR_MODE_SRGB = 7,
    HAL_COLOR_MODE_ADOBE_RGB = 8,
    HAL_COLOR_MODE_DISPLAY_P3 = 9,
} android_color_mode_t;
/* Whole-display color transforms (including accessibility corrections). */
typedef enum {
    HAL_COLOR_TRANSFORM_IDENTITY = 0,
    HAL_COLOR_TRANSFORM_ARBITRARY_MATRIX = 1,
    HAL_COLOR_TRANSFORM_VALUE_INVERSE = 2,
    HAL_COLOR_TRANSFORM_GRAYSCALE = 3,
    HAL_COLOR_TRANSFORM_CORRECT_PROTANOPIA = 4,
    HAL_COLOR_TRANSFORM_CORRECT_DEUTERANOPIA = 5,
    HAL_COLOR_TRANSFORM_CORRECT_TRITANOPIA = 6,
} android_color_transform_t;
/* HDR content types a display can advertise support for. */
typedef enum {
    HAL_HDR_DOLBY_VISION = 1,
    HAL_HDR_HDR10 = 2,
    HAL_HDR_HLG = 3,
} android_hdr_t;
#ifdef __cplusplus
}
#endif
#endif // HIDL_GENERATED_ANDROID_HARDWARE_GRAPHICS_COMMON_V1_0_EXPORTED_CONSTANTS_H_

View File

@ -0,0 +1,49 @@
/* SPDX-License-Identifier: Apache-2.0 */
// This file is autogenerated by hidl-gen. Do not edit manually.
// Source: android.hardware.graphics.common@1.1
// Location: hardware/interfaces/graphics/common/1.1/
#ifndef HIDL_GENERATED_ANDROID_HARDWARE_GRAPHICS_COMMON_V1_1_EXPORTED_CONSTANTS_H_
#define HIDL_GENERATED_ANDROID_HARDWARE_GRAPHICS_COMMON_V1_1_EXPORTED_CONSTANTS_H_
#ifdef __cplusplus
extern "C" {
#endif
/* Pixel formats added in graphics.common@1.1; values continue the @1.0
 * numbering (48-54) and must not collide with android_pixel_format_t. */
typedef enum {
    HAL_PIXEL_FORMAT_DEPTH_16 = 48,
    HAL_PIXEL_FORMAT_DEPTH_24 = 49,
    HAL_PIXEL_FORMAT_DEPTH_24_STENCIL_8 = 50,
    HAL_PIXEL_FORMAT_DEPTH_32F = 51,
    HAL_PIXEL_FORMAT_DEPTH_32F_STENCIL_8 = 52,
    HAL_PIXEL_FORMAT_STENCIL_8 = 53,
    HAL_PIXEL_FORMAT_YCBCR_P010 = 54,
} android_pixel_format_v1_1_t;
/* Dataspaces added in @1.1, composed from the @1.0 STANDARD/TRANSFER/RANGE
 * bit fields (composition shown in trailing comments). */
typedef enum {
    HAL_DATASPACE_BT2020_ITU =
        281411584, // ((STANDARD_BT2020 | TRANSFER_SMPTE_170M) | RANGE_LIMITED)
    HAL_DATASPACE_BT2020_ITU_PQ =
        298188800, // ((STANDARD_BT2020 | TRANSFER_ST2084) | RANGE_LIMITED)
    HAL_DATASPACE_BT2020_ITU_HLG = 302383104, // ((STANDARD_BT2020 | TRANSFER_HLG) | RANGE_LIMITED)
    HAL_DATASPACE_BT2020_HLG = 168165376, // ((STANDARD_BT2020 | TRANSFER_HLG) | RANGE_FULL)
} android_dataspace_v1_1_t;
/* Color modes added in @1.1; numbering continues android_color_mode_t (0-9). */
typedef enum {
    HAL_COLOR_MODE_BT2020 = 10,
    HAL_COLOR_MODE_BT2100_PQ = 11,
    HAL_COLOR_MODE_BT2100_HLG = 12,
} android_color_mode_v1_1_t;
/* Render intents describing how out-of-gamut colors are handled. */
typedef enum {
    HAL_RENDER_INTENT_COLORIMETRIC = 0,
    HAL_RENDER_INTENT_ENHANCE = 1,
    HAL_RENDER_INTENT_TONE_MAP_COLORIMETRIC = 2,
    HAL_RENDER_INTENT_TONE_MAP_ENHANCE = 3,
} android_render_intent_v1_1_t;
#ifdef __cplusplus
}
#endif
#endif // HIDL_GENERATED_ANDROID_HARDWARE_GRAPHICS_COMMON_V1_1_EXPORTED_CONSTANTS_H_

View File

@ -0,0 +1,8 @@
/* SPDX-License-Identifier: Apache-2.0 */
#ifndef SYSTEM_CORE_GRAPHICS_BASE_H_
#define SYSTEM_CORE_GRAPHICS_BASE_H_
#include "graphics-base-v1.0.h"
#include "graphics-base-v1.1.h"
#endif // SYSTEM_CORE_GRAPHICS_BASE_H_

View File

@ -0,0 +1,17 @@
/* SPDX-License-Identifier: Apache-2.0 */
#ifndef SYSTEM_CORE_GRAPHICS_SW_H_
#define SYSTEM_CORE_GRAPHICS_SW_H_
/* Software formats not in the HAL definitions. */
/* Software-only formats; values 39-42 fill the gap between the HIDL @1.0
 * formats (up to 38) and RGBA_1010102 (43). */
typedef enum {
    HAL_PIXEL_FORMAT_YCBCR_422_888 = 39, // 0x27
    HAL_PIXEL_FORMAT_YCBCR_444_888 = 40, // 0x28
    HAL_PIXEL_FORMAT_FLEX_RGB_888 = 41, // 0x29
    HAL_PIXEL_FORMAT_FLEX_RGBA_8888 = 42, // 0x2A
} android_pixel_format_sw_t;
/* for compatibility: mixed-case aliases of the all-caps names above */
#define HAL_PIXEL_FORMAT_YCbCr_422_888 HAL_PIXEL_FORMAT_YCBCR_422_888
#define HAL_PIXEL_FORMAT_YCbCr_444_888 HAL_PIXEL_FORMAT_YCBCR_444_888
#endif // SYSTEM_CORE_GRAPHICS_SW_H_

View File

@ -0,0 +1,269 @@
/* SPDX-License-Identifier: Apache-2.0 */
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef SYSTEM_CORE_INCLUDE_ANDROID_GRAPHICS_H
#define SYSTEM_CORE_INCLUDE_ANDROID_GRAPHICS_H
#include <stddef.h>
#include <stdint.h>
/*
* Some of the enums are now defined in HIDL in hardware/interfaces and are
* generated.
*/
#include "graphics-base.h"
#include "graphics-sw.h"
#ifdef __cplusplus
extern "C" {
#endif
/* for compatibility: mixed-case spellings used by older code map onto the
 * HIDL-generated all-caps enumerators */
#define HAL_PIXEL_FORMAT_YCbCr_420_888 HAL_PIXEL_FORMAT_YCBCR_420_888
#define HAL_PIXEL_FORMAT_YCbCr_422_SP HAL_PIXEL_FORMAT_YCBCR_422_SP
#define HAL_PIXEL_FORMAT_YCrCb_420_SP HAL_PIXEL_FORMAT_YCRCB_420_SP
#define HAL_PIXEL_FORMAT_YCbCr_422_I HAL_PIXEL_FORMAT_YCBCR_422_I
/* Unsuffixed aliases for the generated *_t enum types. */
typedef android_pixel_format_t android_pixel_format;
typedef android_transform_t android_transform;
typedef android_dataspace_t android_dataspace;
typedef android_color_mode_t android_color_mode;
typedef android_color_transform_t android_color_transform;
typedef android_hdr_t android_hdr;
/*
* If the HAL needs to create service threads to handle graphics related
* tasks, these threads need to run at HAL_PRIORITY_URGENT_DISPLAY priority
* if they can block the main rendering thread in any way.
*
* the priority of the current thread can be set with:
*
* #include <sys/resource.h>
* setpriority(PRIO_PROCESS, 0, HAL_PRIORITY_URGENT_DISPLAY);
*
*/
/* Nice value for HAL service threads that can block rendering (see the
 * setpriority() usage example in the comment above this definition). */
#define HAL_PRIORITY_URGENT_DISPLAY (-8)
/*
* Structure for describing YCbCr formats for consumption by applications.
* This is used with HAL_PIXEL_FORMAT_YCbCr_*_888.
*
* Buffer chroma subsampling is defined in the format.
* e.g. HAL_PIXEL_FORMAT_YCbCr_420_888 has subsampling 4:2:0.
*
* Buffers must have a 8 bit depth.
*
* y, cb, and cr point to the first byte of their respective planes.
*
* Stride describes the distance in bytes from the first value of one row of
* the image to the first value of the next row. It includes the width of the
* image plus padding.
* ystride is the stride of the luma plane.
* cstride is the stride of the chroma planes.
*
* chroma_step is the distance in bytes from one chroma pixel value to the
* next. This is 2 bytes for semiplanar (because chroma values are interleaved
* and each chroma value is one byte) and 1 for planar.
*/
/* CPU-accessible view of a YCbCr buffer returned by gralloc's lock_ycbcr.
 * y/cb/cr point at the first byte of each plane; ystride/cstride are the
 * luma/chroma row strides in bytes; chroma_step is the byte distance
 * between consecutive chroma samples (2 = semiplanar, 1 = planar). */
struct android_ycbcr {
    void *y;
    void *cb;
    void *cr;
    size_t ystride;
    size_t cstride;
    size_t chroma_step;
    /** reserved for future use, set to 0 by gralloc's (*lock_ycbcr)() */
    uint32_t reserved[8];
};
/*
* Structures for describing flexible YUVA/RGBA formats for consumption by
* applications. Such flexible formats contain a plane for each component (e.g.
* red, green, blue), where each plane is laid out in a grid-like pattern
* occupying unique byte addresses and with consistent byte offsets between
* neighboring pixels.
*
* The android_flex_layout structure is used with any pixel format that can be
* represented by it, such as:
* - HAL_PIXEL_FORMAT_YCbCr_*_888
* - HAL_PIXEL_FORMAT_FLEX_RGB*_888
* - HAL_PIXEL_FORMAT_RGB[AX]_888[8],BGRA_8888,RGB_888
* - HAL_PIXEL_FORMAT_YV12,Y8,Y16,YCbCr_422_SP/I,YCrCb_420_SP
* - even implementation defined formats that can be represented by
* the structures
*
* Vertical increment (aka. row increment or stride) describes the distance in
* bytes from the first pixel of one row to the first pixel of the next row
* (below) for the component plane. This can be negative.
*
* Horizontal increment (aka. column or pixel increment) describes the distance
* in bytes from one pixel to the next pixel (to the right) on the same row for
* the component plane. This can be negative.
*
* Each plane can be subsampled either vertically or horizontally by
* a power-of-two factor.
*
* The bit-depth of each component can be arbitrary, as long as the pixels are
* laid out on whole bytes, in native byte-order, using the most significant
* bits of each unit.
*/
/* Single-bit component identifiers; android_flex_format_t values are
 * OR-combinations of these. */
typedef enum android_flex_component {
    /* luma */
    FLEX_COMPONENT_Y = 1 << 0,
    /* chroma blue */
    FLEX_COMPONENT_Cb = 1 << 1,
    /* chroma red */
    FLEX_COMPONENT_Cr = 1 << 2,
    /* red */
    FLEX_COMPONENT_R = 1 << 10,
    /* green */
    FLEX_COMPONENT_G = 1 << 11,
    /* blue */
    FLEX_COMPONENT_B = 1 << 12,
    /* alpha */
    FLEX_COMPONENT_A = 1 << 30,
} android_flex_component_t;
/* Layout of one component plane within a flexible-format buffer.
 * Increments may be negative (e.g. bottom-up row order). */
typedef struct android_flex_plane {
    /* pointer to the first byte of the top-left pixel of the plane. */
    uint8_t *top_left;
    android_flex_component_t component;
    /* bits allocated for the component in each pixel. Must be a positive
       multiple of 8. */
    int32_t bits_per_component;
    /* number of the most significant bits used in the format for this
       component. Must be between 1 and bits_per_component, inclusive. */
    int32_t bits_used;
    /* horizontal increment */
    int32_t h_increment;
    /* vertical increment */
    int32_t v_increment;
    /* horizontal subsampling. Must be a positive power of 2. */
    int32_t h_subsampling;
    /* vertical subsampling. Must be a positive power of 2. */
    int32_t v_subsampling;
} android_flex_plane_t;
/* Component sets a flexible buffer may carry, built by OR-ing
 * android_flex_component_t bits. */
typedef enum android_flex_format {
    /* not a flexible format */
    FLEX_FORMAT_INVALID = 0x0,
    FLEX_FORMAT_Y = FLEX_COMPONENT_Y,
    FLEX_FORMAT_YCbCr = FLEX_COMPONENT_Y | FLEX_COMPONENT_Cb | FLEX_COMPONENT_Cr,
    FLEX_FORMAT_YCbCrA = FLEX_FORMAT_YCbCr | FLEX_COMPONENT_A,
    FLEX_FORMAT_RGB = FLEX_COMPONENT_R | FLEX_COMPONENT_G | FLEX_COMPONENT_B,
    FLEX_FORMAT_RGBA = FLEX_FORMAT_RGB | FLEX_COMPONENT_A,
} android_flex_format_t;
/* Top-level description of a flexible buffer: its component set plus one
 * android_flex_plane_t per component. */
typedef struct android_flex_layout {
    /* the kind of flexible format */
    android_flex_format_t format;
    /* number of planes; 0 for FLEX_FORMAT_INVALID */
    uint32_t num_planes;
    /* a plane for each component; ordered in increasing component value order.
       E.g. FLEX_FORMAT_RGBA maps 0 -> R, 1 -> G, etc.
       Can be NULL for FLEX_FORMAT_INVALID */
    android_flex_plane_t *planes;
} android_flex_layout_t;
/**
* Structure used to define depth point clouds for format HAL_PIXEL_FORMAT_BLOB
* with dataSpace value of HAL_DATASPACE_DEPTH.
* When locking a native buffer of the above format and dataSpace value,
* the vaddr pointer can be cast to this structure.
*
* A variable-length list of (x,y,z, confidence) 3D points, as floats. (x, y,
* z) represents a measured point's position, with the coordinate system defined
* by the data source. Confidence represents the estimated likelihood that this
* measurement is correct. It is between 0.f and 1.f, inclusive, with 1.f ==
* 100% confidence.
*
* num_points is the number of points in the list
*
* xyz_points is the flexible array of floating-point values.
* It contains (num_points) * 4 floats.
*
* For example:
* android_depth_points d = get_depth_buffer();
* struct {
* float x; float y; float z; float confidence;
* } firstPoint, lastPoint;
*
* firstPoint.x = d.xyzc_points[0];
* firstPoint.y = d.xyzc_points[1];
* firstPoint.z = d.xyzc_points[2];
* firstPoint.confidence = d.xyzc_points[3];
* lastPoint.x = d.xyzc_points[(d.num_points - 1) * 4 + 0];
* lastPoint.y = d.xyzc_points[(d.num_points - 1) * 4 + 1];
* lastPoint.z = d.xyzc_points[(d.num_points - 1) * 4 + 2];
* lastPoint.confidence = d.xyzc_points[(d.num_points - 1) * 4 + 3];
*/
/* Depth point cloud for HAL_PIXEL_FORMAT_BLOB + HAL_DATASPACE_DEPTH buffers;
 * xyzc_points holds num_points * 4 floats as (x, y, z, confidence) tuples
 * (see the usage example in the comment above this struct). */
struct android_depth_points {
    uint32_t num_points;
    /** reserved for future use, set to 0 by gralloc's (*lock)() */
    uint32_t reserved[8];
/* The flexible array member is a C99 feature; silence clang's warning
 * when this header is compiled as C++. */
#if defined(__clang__)
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wc99-extensions"
#endif
    float xyzc_points[];
#if defined(__clang__)
#pragma clang diagnostic pop
#endif
};
/**
* These structures are used to define the reference display's
* capabilities for HDR content. Display engine can use this
* to better tone map content to user's display.
* Color is defined in CIE XYZ coordinates
*/
/* A chromaticity point in CIE 1931 xy coordinates. */
struct android_xy_color {
    float x;
    float y;
};
/* SMPTE ST 2086 mastering-display metadata: RGB primaries and white point
 * as CIE xy chromaticities, plus display luminance bounds.
 * NOTE(review): luminance units (presumably cd/m^2) are not stated here. */
struct android_smpte2086_metadata {
    struct android_xy_color displayPrimaryRed;
    struct android_xy_color displayPrimaryGreen;
    struct android_xy_color displayPrimaryBlue;
    struct android_xy_color whitePoint;
    float maxLuminance;
    float minLuminance;
};
/* CTA-861.3 HDR content light level metadata (MaxCLL / MaxFALL). */
struct android_cta861_3_metadata {
    float maxContentLightLevel;
    float maxFrameAverageLightLevel;
};
#ifdef __cplusplus
}
#endif
#endif /* SYSTEM_CORE_INCLUDE_ANDROID_GRAPHICS_H */

View File

@ -0,0 +1,36 @@
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2021, Ideas on Board Oy
*
* Call stack backtraces
*/
#pragma once
#include <string>
#include <vector>
#include <libcamera/base/private.h>
#include <libcamera/base/class.h>
namespace libcamera {
/*
 * Call stack capture helper. NOTE(review): capture presumably happens in the
 * constructor (implementation not visible in this header) and toString()
 * renders the stored frames on demand. Copying is disabled.
 */
class Backtrace
{
public:
	Backtrace();

	/* skipLevels: number of innermost frames to omit from the output. */
	std::string toString(unsigned int skipLevels = 0) const;

private:
	LIBCAMERA_DISABLE_COPY(Backtrace)

	/* Alternative capture backends; return value signals success.
	 * NOTE(review): selection/fallback order lives in the .cpp. */
	bool backtraceTrace();
	bool unwindTrace();

	std::vector<void *> backtrace_;          /* raw frame addresses */
	std::vector<std::string> backtraceText_; /* pre-formatted frame strings */
};
} /* namespace libcamera */

View File

@ -0,0 +1,224 @@
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2019, Google Inc.
*
* Method bind and invocation
*/
#pragma once
#include <memory>
#include <tuple>
#include <type_traits>
#include <utility>
namespace libcamera {
class Object;
/*
 * Delivery mode for bound-method invocations across Object threads.
 * NOTE(review): per libcamera's Object documentation, Auto selects Direct for
 * same-thread targets and Queued otherwise; Blocking queues and waits —
 * confirm against the libcamera Doxygen docs.
 */
enum ConnectionType {
	ConnectionTypeAuto,
	ConnectionTypeDirect,
	ConnectionTypeQueued,
	ConnectionTypeBlocking,
};
/*
 * Type-erased base for packed invocation arguments, allowing a
 * BoundMethodPack<R, Args...> to be stored and dispatched polymorphically.
 */
class BoundMethodPackBase
{
public:
	virtual ~BoundMethodPackBase() = default;
};
/*
 * Stores a copy of the call arguments (as a tuple of decayed-reference types)
 * plus the return value produced when the pack is later invoked.
 */
template<typename R, typename... Args>
class BoundMethodPack : public BoundMethodPackBase
{
public:
	BoundMethodPack(const Args &... args)
		: args_(args...)
	{
	}

	/* Valid only after the pack has been invoked. */
	R returnValue()
	{
		return ret_;
	}

	std::tuple<typename std::remove_reference_t<Args>...> args_;
	R ret_;
};
/*
 * Specialization for void-returning methods: no ret_ member, and
 * returnValue() is a no-op so generic callers can still call it.
 */
template<typename... Args>
class BoundMethodPack<void, Args...> : public BoundMethodPackBase
{
public:
	BoundMethodPack(const Args &... args)
		: args_(args...)
	{
	}

	void returnValue()
	{
	}

	std::tuple<typename std::remove_reference_t<Args>...> args_;
};
/*
 * Non-template base of all bound methods: holds the raw target pointer
 * (obj_), the owning Object (object_, may be null for free/static targets)
 * and the delivery mode.
 */
class BoundMethodBase
{
public:
	BoundMethodBase(void *obj, Object *object, ConnectionType type)
		: obj_(obj), object_(object), connectionType_(type)
	{
	}
	virtual ~BoundMethodBase() = default;

	/* Overload matching a concrete target; disabled for Object* so the
	 * Object overload below wins without ambiguity. */
	template<typename T, std::enable_if_t<!std::is_same<Object, T>::value> * = nullptr>
	bool match(T *obj) { return obj == obj_; }
	bool match(Object *object) { return object == object_; }

	Object *object() const { return object_; }

	/* Invoke the target using previously packed arguments. */
	virtual void invokePack(BoundMethodPackBase *pack) = 0;

protected:
	/* Dispatch the pack according to connectionType_; returns whether the
	 * invocation completed synchronously (so returnValue() is valid). */
	bool activatePack(std::shared_ptr<BoundMethodPackBase> pack,
			  bool deleteMethod);

	void *obj_;
	Object *object_;

private:
	ConnectionType connectionType_;
};
/*
 * Adds the argument signature to BoundMethodBase and implements unpacking of
 * a BoundMethodPack back into an invoke() call. Two SFINAE overloads handle
 * non-void (store result in ret_) vs void (discard) return types.
 */
template<typename R, typename... Args>
class BoundMethodArgs : public BoundMethodBase
{
public:
	using PackType = BoundMethodPack<R, Args...>;

private:
	/* Non-void R: expand the tuple into invoke() and record the result. */
	template<std::size_t... I, typename T = R>
	std::enable_if_t<!std::is_void<T>::value, void>
	invokePack(BoundMethodPackBase *pack, std::index_sequence<I...>)
	{
		PackType *args = static_cast<PackType *>(pack);
		args->ret_ = invoke(std::get<I>(args->args_)...);
	}

	/* Void R: expand and call, nothing to store. */
	template<std::size_t... I, typename T = R>
	std::enable_if_t<std::is_void<T>::value, void>
	invokePack(BoundMethodPackBase *pack, std::index_sequence<I...>)
	{
		/* args is effectively unused when the sequence I is empty. */
		PackType *args [[gnu::unused]] = static_cast<PackType *>(pack);
		invoke(std::get<I>(args->args_)...);
	}

public:
	BoundMethodArgs(void *obj, Object *object, ConnectionType type)
		: BoundMethodBase(obj, object, type) {}

	void invokePack(BoundMethodPackBase *pack) override
	{
		invokePack(pack, std::make_index_sequence<sizeof...(Args)>{});
	}

	/* activate() honours the connection type; invoke() always calls
	 * directly in the current thread. */
	virtual R activate(Args... args, bool deleteMethod = false) = 0;
	virtual R invoke(Args... args) = 0;
};
/*
 * Bound method wrapping an arbitrary callable (lambda/functor). With no
 * owning Object the call is made directly; otherwise arguments are packed
 * and dispatched through activatePack().
 */
template<typename T, typename R, typename Func, typename... Args>
class BoundMethodFunctor : public BoundMethodArgs<R, Args...>
{
public:
	using PackType = typename BoundMethodArgs<R, Args...>::PackType;

	BoundMethodFunctor(T *obj, Object *object, Func func,
			   ConnectionType type = ConnectionTypeAuto)
		: BoundMethodArgs<R, Args...>(obj, object, type), func_(func)
	{
	}

	R activate(Args... args, bool deleteMethod = false) override
	{
		if (!this->object_)
			return func_(args...);

		auto pack = std::make_shared<PackType>(args...);
		bool sync = BoundMethodBase::activatePack(pack, deleteMethod);
		/* Asynchronous delivery has no result yet: return default R. */
		return sync ? pack->returnValue() : R();
	}

	R invoke(Args... args) override
	{
		return func_(args...);
	}

private:
	Func func_;
};
/*
 * Bound method wrapping a pointer-to-member-function of T. Mirrors
 * BoundMethodFunctor but invokes through (obj->*func_)(...).
 */
template<typename T, typename R, typename... Args>
class BoundMethodMember : public BoundMethodArgs<R, Args...>
{
public:
	using PackType = typename BoundMethodArgs<R, Args...>::PackType;

	BoundMethodMember(T *obj, Object *object, R (T::*func)(Args...),
			  ConnectionType type = ConnectionTypeAuto)
		: BoundMethodArgs<R, Args...>(obj, object, type), func_(func)
	{
	}

	/* Identity check used when disconnecting a specific member function. */
	bool match(R (T::*func)(Args...)) const { return func == func_; }

	R activate(Args... args, bool deleteMethod = false) override
	{
		if (!this->object_) {
			T *obj = static_cast<T *>(this->obj_);
			return (obj->*func_)(args...);
		}

		auto pack = std::make_shared<PackType>(args...);
		bool sync = BoundMethodBase::activatePack(pack, deleteMethod);
		/* Asynchronous delivery has no result yet: return default R. */
		return sync ? pack->returnValue() : R();
	}

	R invoke(Args... args) override
	{
		T *obj = static_cast<T *>(this->obj_);
		return (obj->*func_)(args...);
	}

private:
	R (T::*func_)(Args...);
};
/*
 * Bound method wrapping a free/static function. There is no target Object,
 * so activate() always calls directly. Note invoke() deliberately returns a
 * default-constructed R without calling func_ — packed (queued) dispatch is
 * not applicable to static targets.
 */
template<typename R, typename... Args>
class BoundMethodStatic : public BoundMethodArgs<R, Args...>
{
public:
	BoundMethodStatic(R (*func)(Args...))
		: BoundMethodArgs<R, Args...>(nullptr, nullptr, ConnectionTypeAuto),
		  func_(func)
	{
	}

	/* Identity check used when disconnecting a specific function. */
	bool match(R (*func)(Args...)) const { return func == func_; }

	R activate(Args... args, [[maybe_unused]] bool deleteMethod = false) override
	{
		return (*func_)(args...);
	}

	R invoke(Args...) override
	{
		return R();
	}

private:
	R (*func_)(Args...);
};
} /* namespace libcamera */

View File

@ -0,0 +1,109 @@
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2020, Google Inc.
*
* Utilities and helpers for classes
*/
#pragma once
#include <memory>
namespace libcamera {
#ifndef __DOXYGEN__
/* Delete copy construction and copy assignment for klass. */
#define LIBCAMERA_DISABLE_COPY(klass)  \
	klass(const klass &) = delete; \
	klass &operator=(const klass &) = delete;

/* Delete move construction and move assignment for klass. */
#define LIBCAMERA_DISABLE_MOVE(klass) \
	klass(klass &&) = delete;     \
	klass &operator=(klass &&) = delete;

/* Delete all four copy/move special members for klass. */
#define LIBCAMERA_DISABLE_COPY_AND_MOVE(klass) \
	LIBCAMERA_DISABLE_COPY(klass)          \
	LIBCAMERA_DISABLE_MOVE(klass)
#else
#define LIBCAMERA_DISABLE_COPY(klass)
#define LIBCAMERA_DISABLE_MOVE(klass)
#define LIBCAMERA_DISABLE_COPY_AND_MOVE(klass)
#endif

#ifndef __DOXYGEN__
/* Declare the d-pointer boilerplate inside an Extensible subclass: a nested
 * Private class plus typed _d() accessors. The dummy template parameter B
 * defers instantiation until Private is a complete type. */
#define LIBCAMERA_DECLARE_PRIVATE()                   \
public:                                               \
	class Private;                                \
	friend class Private;                         \
	template <bool B = true>                      \
	const Private *_d() const                     \
	{                                             \
		return Extensible::_d<Private>();     \
	}                                             \
	template <bool B = true>                      \
	Private *_d()                                 \
	{                                             \
		return Extensible::_d<Private>();     \
	}

/* Declare, inside a Private implementation, its public-facing class. */
#define LIBCAMERA_DECLARE_PUBLIC(klass) \
	friend class klass;             \
	using Public = klass;

/* Access the public instance from within a Private member function. */
#define LIBCAMERA_O_PTR() \
	_o<Public>()

#else
#define LIBCAMERA_DECLARE_PRIVATE()
#define LIBCAMERA_DECLARE_PUBLIC(klass)
#define LIBCAMERA_O_PTR()
#endif
/*
 * Base class implementing the d-pointer (pimpl) idiom: the public object
 * owns a Private implementation, and each side can navigate to the other
 * (_d() public -> private, _o() private -> public).
 */
class Extensible
{
public:
	class Private
	{
	public:
		Private();
		virtual ~Private();

#ifndef __DOXYGEN__
		/* Typed access to the owning public instance. */
		template<typename T>
		const T *_o() const
		{
			return static_cast<const T *>(o_);
		}

		template<typename T>
		T *_o()
		{
			return static_cast<T *>(o_);
		}
#endif

	private:
		/* To initialize o_ from Extensible. */
		friend class Extensible;
		Extensible *const o_;
	};

	/* Takes ownership of the private implementation. */
	Extensible(std::unique_ptr<Private> d);

protected:
	/* Typed access to the private implementation. */
	template<typename T>
	const T *_d() const
	{
		return static_cast<const T *>(d_.get());
	}

	template<typename T>
	T *_d()
	{
		return static_cast<T *>(d_.get());
	}

private:
	const std::unique_ptr<Private> d_;
};
} /* namespace libcamera */

View File

@ -0,0 +1,14 @@
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2021, Google Inc.
*
* Compiler support
*/
#pragma once
/* Map __nodiscard to the C++17 [[nodiscard]] attribute, or to nothing on
 * older standards where the attribute is unavailable. */
#if __cplusplus >= 201703L
#define __nodiscard [[nodiscard]]
#else
#define __nodiscard
#endif

View File

@ -0,0 +1,35 @@
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2019, Google Inc.
*
* Event dispatcher
*/
#pragma once
#include <vector>
#include <libcamera/base/private.h>
namespace libcamera {
class EventNotifier;
class Timer;

/*
 * Abstract interface for an event loop back end: implementations
 * multiplex file-descriptor notifiers and timers and deliver their
 * events from processEvents().
 */
class EventDispatcher
{
public:
	virtual ~EventDispatcher();

	/* Start/stop watching the file descriptor of \a notifier. */
	virtual void registerEventNotifier(EventNotifier *notifier) = 0;
	virtual void unregisterEventNotifier(EventNotifier *notifier) = 0;

	/* Start/stop watching for expiry of \a timer. */
	virtual void registerTimer(Timer *timer) = 0;
	virtual void unregisterTimer(Timer *timer) = 0;

	/* Wait for and dispatch pending events. */
	virtual void processEvents() = 0;

	/* Interrupt a (possibly blocked) processEvents() call. */
	virtual void interrupt() = 0;
};
} /* namespace libcamera */

View File

@ -0,0 +1,59 @@
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2019, Google Inc.
*
* Poll-based event dispatcher
*/
#pragma once
#include <list>
#include <map>
#include <vector>
#include <libcamera/base/private.h>
#include <libcamera/base/event_dispatcher.h>
#include <libcamera/base/unique_fd.h>
struct pollfd;
namespace libcamera {
class EventNotifier;
class Timer;

/*
 * EventDispatcher implementation based on poll(2): notifiers and timers
 * are multiplexed over a single poll() call, with an internal eventfd
 * available to wake a blocked poll.
 */
class EventDispatcherPoll final : public EventDispatcher
{
public:
	EventDispatcherPoll();
	~EventDispatcherPoll();

	/*
	 * EventDispatcher interface.
	 * NOTE(review): these override virtuals from EventDispatcher but
	 * are not marked 'override' (unlike e.g. EventNotifier::message());
	 * consider adding the keyword for consistency.
	 */
	void registerEventNotifier(EventNotifier *notifier);
	void unregisterEventNotifier(EventNotifier *notifier);
	void registerTimer(Timer *timer);
	void unregisterTimer(Timer *timer);
	void processEvents();
	void interrupt();

private:
	/* Up to three notifiers (likely one per EventNotifier::Type) per fd. */
	struct EventNotifierSetPoll {
		short events() const;
		EventNotifier *notifiers[3];
	};

	int poll(std::vector<struct pollfd> *pollfds);
	void processInterrupt(const struct pollfd &pfd);
	void processNotifiers(const std::vector<struct pollfd> &pollfds);
	void processTimers();

	/* Watched notifier sets, keyed by file descriptor. */
	std::map<int, EventNotifierSetPoll> notifiers_;
	std::list<Timer *> timers_;
	/* NOTE(review): presumably written by interrupt() to wake poll() —
	 * confirm in event_dispatcher_poll.cpp. */
	UniqueFD eventfd_;
	bool processingEvents_;
};
} /* namespace libcamera */

View File

@ -0,0 +1,48 @@
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2019, Google Inc.
*
* File descriptor event notifier
*/
#pragma once
#include <libcamera/base/private.h>
#include <libcamera/base/object.h>
#include <libcamera/base/signal.h>
namespace libcamera {
class Message;

/*
 * Watches a file descriptor and emits the activated signal when the
 * watched condition (read, write or exception) occurs.
 */
class EventNotifier : public Object
{
public:
	/* Condition to watch for on the file descriptor. */
	enum Type {
		Read,
		Write,
		Exception,
	};

	EventNotifier(int fd, Type type, Object *parent = nullptr);
	virtual ~EventNotifier();

	Type type() const { return type_; }
	int fd() const { return fd_; }

	/* Whether events are currently delivered for this notifier. */
	bool enabled() const { return enabled_; }
	void setEnabled(bool enable);

	/* Emitted when the watched condition is ready on fd(). */
	Signal<> activated;

protected:
	void message(Message *msg) override;

private:
	int fd_;
	Type type_;
	bool enabled_;
};
} /* namespace libcamera */

View File

@ -0,0 +1,87 @@
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2020, Google Inc.
*
* File I/O operations
*/
#pragma once
#include <sys/types.h>
#include <map>
#include <string>
#include <libcamera/base/private.h>
#include <libcamera/base/class.h>
#include <libcamera/base/flags.h>
#include <libcamera/base/span.h>
#include <libcamera/base/unique_fd.h>
namespace libcamera {
/*
 * Wrapper around file I/O: open/close, read/write, seeking and memory
 * mapping, with failures reported through error().
 */
class File
{
public:
	/* Options for map(). */
	enum class MapFlag {
		NoOption = 0,
		Private = (1 << 0),
	};

	using MapFlags = Flags<MapFlag>;

	/* Access mode for open(). */
	enum class OpenModeFlag {
		NotOpen = 0,
		ReadOnly = (1 << 0),
		WriteOnly = (1 << 1),
		ReadWrite = ReadOnly | WriteOnly,
	};

	using OpenMode = Flags<OpenModeFlag>;

	File(const std::string &name);
	File();
	~File();

	const std::string &fileName() const { return name_; }
	void setFileName(const std::string &name);
	bool exists() const;

	bool open(OpenMode mode);
	bool isOpen() const { return fd_.isValid(); }
	OpenMode openMode() const { return mode_; }
	void close();

	/* Error state of the last operation — exact semantics in file.cpp. */
	int error() const { return error_; }
	ssize_t size() const;

	off_t pos() const;
	off_t seek(off_t pos);

	ssize_t read(const Span<uint8_t> &data);
	ssize_t write(const Span<const uint8_t> &data);

	/* NOTE(review): size = -1 presumably maps to the end of file —
	 * confirm in file.cpp. */
	Span<uint8_t> map(off_t offset = 0, ssize_t size = -1,
			  MapFlags flags = MapFlag::NoOption);
	bool unmap(uint8_t *addr);

	static bool exists(const std::string &name);

private:
	LIBCAMERA_DISABLE_COPY(File)

	void unmapAll();

	std::string name_;
	UniqueFD fd_;
	OpenMode mode_;

	int error_;
	/* Active mappings, keyed by mapped address. */
	std::map<void *, size_t> maps_;
};

/* Enable E | E etc. on the flag enums (see flags.h). */
LIBCAMERA_FLAGS_ENABLE_OPERATORS(File::MapFlag)
LIBCAMERA_FLAGS_ENABLE_OPERATORS(File::OpenModeFlag)
} /* namespace libcamera */

View File

@ -0,0 +1,193 @@
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2020, Google Inc.
*
* Type-safe enum-based bitfields
*/
#pragma once
#include <type_traits>
namespace libcamera {
/*
 * \brief Type-safe container for OR-able flags of enum type \a E
 *
 * Flags<E> stores a combination of enumerators of E in the enum's
 * underlying integer type, providing the bitwise operators that a
 * scoped enum lacks while preventing accidental mixing with unrelated
 * integer or enum types.
 */
template<typename E>
class Flags
{
public:
	static_assert(std::is_enum<E>::value,
		      "Flags<> template parameter must be an enum");

	/* Underlying integer type used to store the combined flags. */
	using Type = std::underlying_type_t<E>;

	/* Construct an empty flag set. */
	constexpr Flags()
		: value_(0)
	{
	}

	/* Implicit conversion from a single enumerator. */
	constexpr Flags(E flag)
		: value_(static_cast<Type>(flag))
	{
	}

	constexpr Flags &operator&=(E flag)
	{
		value_ &= static_cast<Type>(flag);
		return *this;
	}

	constexpr Flags &operator&=(Flags other)
	{
		value_ &= other.value_;
		return *this;
	}

	constexpr Flags &operator|=(E flag)
	{
		value_ |= static_cast<Type>(flag);
		return *this;
	}

	constexpr Flags &operator|=(Flags other)
	{
		value_ |= other.value_;
		return *this;
	}

	constexpr Flags &operator^=(E flag)
	{
		value_ ^= static_cast<Type>(flag);
		return *this;
	}

	constexpr Flags &operator^=(Flags other)
	{
		value_ ^= other.value_;
		return *this;
	}

	/*
	 * Comparison operators are const-qualified: they do not modify the
	 * flag set and must be callable on const Flags instances (the
	 * original non-const versions rejected such calls).
	 */
	constexpr bool operator==(E flag) const
	{
		return value_ == static_cast<Type>(flag);
	}

	constexpr bool operator==(Flags other) const
	{
		return value_ == static_cast<Type>(other);
	}

	constexpr bool operator!=(E flag) const
	{
		return value_ != static_cast<Type>(flag);
	}

	constexpr bool operator!=(Flags other) const
	{
		return value_ != static_cast<Type>(other);
	}

	/* Explicit conversion to the raw underlying value. */
	constexpr explicit operator Type() const
	{
		return value_;
	}

	/* True when at least one flag is set. */
	constexpr explicit operator bool() const
	{
		return !!value_;
	}

	constexpr Flags operator&(E flag) const
	{
		return Flags(static_cast<E>(value_ & static_cast<Type>(flag)));
	}

	constexpr Flags operator&(Flags other) const
	{
		return Flags(static_cast<E>(value_ & other.value_));
	}

	constexpr Flags operator|(E flag) const
	{
		return Flags(static_cast<E>(value_ | static_cast<Type>(flag)));
	}

	constexpr Flags operator|(Flags other) const
	{
		return Flags(static_cast<E>(value_ | other.value_));
	}

	constexpr Flags operator^(E flag) const
	{
		return Flags(static_cast<E>(value_ ^ static_cast<Type>(flag)));
	}

	constexpr Flags operator^(Flags other) const
	{
		return Flags(static_cast<E>(value_ ^ other.value_));
	}

	constexpr Flags operator~() const
	{
		return Flags(static_cast<E>(~value_));
	}

	constexpr bool operator!() const
	{
		return !value_;
	}

private:
	Type value_;
};
#ifndef __DOXYGEN__
/*
 * Opt-in trait: the global bitwise operators on a plain enum E below are
 * only enabled when flags_enable_operators<E>::enable is specialized to
 * true via LIBCAMERA_FLAGS_ENABLE_OPERATORS().
 */
template<typename E>
struct flags_enable_operators {
	static const bool enable = false;
};

/* E | E -> Flags<E>, for opted-in enums. */
template<typename E>
std::enable_if_t<flags_enable_operators<E>::enable, Flags<E>>
operator|(E lhs, E rhs)
{
	using type = std::underlying_type_t<E>;
	return Flags<E>(static_cast<E>(static_cast<type>(lhs) | static_cast<type>(rhs)));
}

/* E & E -> Flags<E>. */
template<typename E>
std::enable_if_t<flags_enable_operators<E>::enable, Flags<E>>
operator&(E lhs, E rhs)
{
	using type = std::underlying_type_t<E>;
	return Flags<E>(static_cast<E>(static_cast<type>(lhs) & static_cast<type>(rhs)));
}

/* E ^ E -> Flags<E>. */
template<typename E>
std::enable_if_t<flags_enable_operators<E>::enable, Flags<E>>
operator^(E lhs, E rhs)
{
	using type = std::underlying_type_t<E>;
	return Flags<E>(static_cast<E>(static_cast<type>(lhs) ^ static_cast<type>(rhs)));
}

/* ~E -> Flags<E>. */
template<typename E>
std::enable_if_t<flags_enable_operators<E>::enable, Flags<E>>
operator~(E rhs)
{
	using type = std::underlying_type_t<E>;
	return Flags<E>(static_cast<E>(~static_cast<type>(rhs)));
}

/* Enable the operators above for \a _enum. */
#define LIBCAMERA_FLAGS_ENABLE_OPERATORS(_enum) \
template<> \
struct flags_enable_operators<_enum> { \
	static const bool enable = true; \
};

#else /* __DOXYGEN__ */

#define LIBCAMERA_FLAGS_ENABLE_OPERATORS(_enum)

#endif /* __DOXYGEN__ */
} /* namespace libcamera */

View File

@ -0,0 +1,136 @@
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2018, Google Inc.
*
* Logging infrastructure
*/
#pragma once
#include <chrono>
#include <sstream>
#include <libcamera/base/private.h>
#include <libcamera/base/class.h>
#include <libcamera/base/utils.h>
namespace libcamera {
/* Log levels in increasing order of severity. */
enum LogSeverity {
	LogInvalid = -1,
	LogDebug = 0,
	LogInfo,
	LogWarning,
	LogError,
	LogFatal,
};

/*
 * A named logging category carrying its own severity threshold.
 * Instances are created through create() (see LOG_DEFINE_CATEGORY).
 */
class LogCategory
{
public:
	static LogCategory *create(const char *name);

	const std::string &name() const { return name_; }
	LogSeverity severity() const { return severity_; }
	void setSeverity(LogSeverity severity);

	/* Category presumably used when no explicit category is given. */
	static const LogCategory &defaultCategory();

private:
	explicit LogCategory(const char *name);

	const std::string name_;
	LogSeverity severity_;
};

/* Declare the accessor function for a category defined elsewhere. */
#define LOG_DECLARE_CATEGORY(name) \
extern const LogCategory &_LOG_CATEGORY(name)();

/* Define a category accessor around a lazily-created singleton. */
#define LOG_DEFINE_CATEGORY(name) \
LOG_DECLARE_CATEGORY(name) \
const LogCategory &_LOG_CATEGORY(name)() \
{ \
	/* The instance will be deleted by the Logger destructor. */ \
	static LogCategory *category = LogCategory::create(#name); \
	return *category; \
}
/*
 * A single in-flight log message: text is accumulated through stream()
 * and presumably flushed by the destructor — see log.cpp.
 */
class LogMessage
{
public:
	LogMessage(const char *fileName, unsigned int line,
		   const LogCategory &category, LogSeverity severity,
		   const std::string &prefix = std::string());

	LogMessage(LogMessage &&);
	~LogMessage();

	/* Stream the message text is appended to. */
	std::ostream &stream() { return msgStream_; }

	const utils::time_point &timestamp() const { return timestamp_; }
	LogSeverity severity() const { return severity_; }
	const LogCategory &category() const { return category_; }
	const std::string &fileInfo() const { return fileInfo_; }
	const std::string &prefix() const { return prefix_; }
	/* Message text accumulated so far. */
	const std::string msg() const { return msgStream_.str(); }

private:
	LIBCAMERA_DISABLE_COPY(LogMessage)

	void init(const char *fileName, unsigned int line);

	std::ostringstream msgStream_;
	const LogCategory &category_;
	LogSeverity severity_;
	utils::time_point timestamp_;
	std::string fileInfo_;
	std::string prefix_;
};
/*
 * Mixin for classes wanting a per-instance prefix on their log messages,
 * provided by implementing logPrefix().
 */
class Loggable
{
public:
	virtual ~Loggable();

protected:
	/* Prefix attached to messages created through _log() below. */
	virtual std::string logPrefix() const = 0;

	LogMessage _log(const LogCategory *category, LogSeverity severity,
			const char *fileName = __builtin_FILE(),
			unsigned int line = __builtin_LINE()) const;
};

/* Free-function variant used by the LOG() macros below. */
LogMessage _log(const LogCategory *category, LogSeverity severity,
		const char *fileName = __builtin_FILE(),
		unsigned int line = __builtin_LINE());

#ifndef __DOXYGEN__
#define _LOG_CATEGORY(name) logCategory##name

/* LOG(severity) form: nullptr presumably selects the default category. */
#define _LOG1(severity) \
	_log(nullptr, Log##severity).stream()
/* LOG(category, severity) form: log to a declared category. */
#define _LOG2(category, severity) \
	_log(&_LOG_CATEGORY(category)(), Log##severity).stream()

/*
 * Expand the LOG() macro to _LOG1() or _LOG2() based on the number of
 * arguments.
 */
#define _LOG_MACRO(_1, _2, NAME, ...) NAME
#define LOG(...) _LOG_MACRO(__VA_ARGS__, _LOG2, _LOG1)(__VA_ARGS__)
#else /* __DOXYGEN__ */
#define LOG(category, severity)
#endif /* __DOXYGEN__ */

#ifndef NDEBUG
/* Fatal-log the failed condition in debug builds. */
#define ASSERT(condition) static_cast<void>(({ \
	if (!(condition)) \
		LOG(Fatal) << "assertion \"" #condition "\" failed in " \
			   << __func__ << "()"; \
}))
#else
/* NDEBUG: keep the expression syntactically checked but never evaluated. */
#define ASSERT(condition) static_cast<void>(false && (condition))
#endif
} /* namespace libcamera */

View File

@ -0,0 +1,40 @@
# SPDX-License-Identifier: CC0-1.0
# Base headers are installed under <includedir>/libcamera/base.
libcamera_base_include_dir = libcamera_include_dir / 'base'

# Public headers: part of the installed libcamera base API.
libcamera_base_public_headers = files([
    'bound_method.h',
    'class.h',
    'compiler.h',
    'flags.h',
    'object.h',
    'shared_fd.h',
    'signal.h',
    'span.h',
    'unique_fd.h',
])

# Private headers: used internally, not installed for applications.
libcamera_base_private_headers = files([
    'backtrace.h',
    'event_dispatcher.h',
    'event_dispatcher_poll.h',
    'event_notifier.h',
    'file.h',
    'log.h',
    'message.h',
    'mutex.h',
    'private.h',
    'semaphore.h',
    'thread.h',
    'thread_annotations.h',
    'timer.h',
    'utils.h',
])

# Aggregate of all base headers for build dependencies.
libcamera_base_headers = [
    libcamera_base_public_headers,
    libcamera_base_private_headers,
]

# Only the public subset is installed.
install_headers(libcamera_base_public_headers,
                subdir : libcamera_base_include_dir)

View File

@ -0,0 +1,71 @@
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2019, Google Inc.
*
* Message queue support
*/
#pragma once
#include <atomic>
#include <libcamera/base/private.h>
#include <libcamera/base/bound_method.h>
namespace libcamera {
class BoundMethodBase;
class Object;
class Semaphore;
class Thread;

/*
 * Base class for messages posted to an Object and delivered through its
 * thread's message handling.
 */
class Message
{
public:
	/* Message type discriminator. */
	enum Type {
		None = 0,
		InvokeMessage = 1,
		ThreadMoveMessage = 2,
		DeferredDelete = 3,
		/* First identifier handed out by registerMessageType(). */
		UserMessage = 1000,
	};

	Message(Type type);
	virtual ~Message();

	Type type() const { return type_; }
	/* Object the message is addressed to. */
	Object *receiver() const { return receiver_; }

	/* Reserve a new unique message type for application use. */
	static Type registerMessageType();

private:
	/* NOTE(review): Thread presumably fills receiver_ on post — confirm. */
	friend class Thread;

	Type type_;
	Object *receiver_;

	static std::atomic_uint nextUserType_;
};
/*
 * Message carrying a bound method and its packed arguments for
 * cross-thread invocation via invoke().
 */
class InvokeMessage : public Message
{
public:
	InvokeMessage(BoundMethodBase *method,
		      std::shared_ptr<BoundMethodPackBase> pack,
		      Semaphore *semaphore = nullptr,
		      bool deleteMethod = false);
	~InvokeMessage();

	/* NOTE(review): presumably signalled when a blocking invocation
	 * completes — confirm in message.cpp. */
	Semaphore *semaphore() const { return semaphore_; }

	/* Execute the stored method with the stored argument pack. */
	void invoke();

private:
	BoundMethodBase *method_;
	std::shared_ptr<BoundMethodPackBase> pack_;
	Semaphore *semaphore_;
	bool deleteMethod_;
};
} /* namespace libcamera */

View File

@ -0,0 +1,134 @@
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2021, Google Inc.
*
* Mutex classes with clang thread safety annotation
*/
#pragma once
#include <condition_variable>
#include <mutex>
#include <libcamera/base/private.h>
#include <libcamera/base/thread_annotations.h>
namespace libcamera {
/* \todo using Mutex = std::mutex if libc++ is used. */

#ifndef __DOXYGEN__
/*
 * std::mutex wrapper carrying clang thread-safety-analysis capability
 * annotations so lock/unlock discipline can be checked at compile time.
 */
class LIBCAMERA_TSA_CAPABILITY("mutex") Mutex final
{
public:
	constexpr Mutex()
	{
	}

	void lock() LIBCAMERA_TSA_ACQUIRE()
	{
		mutex_.lock();
	}

	void unlock() LIBCAMERA_TSA_RELEASE()
	{
		mutex_.unlock();
	}

private:
	/* MutexLocker builds its unique_lock from the raw std::mutex. */
	friend class MutexLocker;

	std::mutex mutex_;
};
/*
 * Scoped lock for Mutex (std::unique_lock analogue), annotated for
 * clang thread safety analysis.
 */
class LIBCAMERA_TSA_SCOPED_CAPABILITY MutexLocker final
{
public:
	explicit MutexLocker(Mutex &mutex) LIBCAMERA_TSA_ACQUIRE(mutex)
		: lock_(mutex.mutex_)
	{
	}

	/* Associate with \a mutex without locking it (std::defer_lock). */
	MutexLocker(Mutex &mutex, std::defer_lock_t t) noexcept LIBCAMERA_TSA_EXCLUDES(mutex)
		: lock_(mutex.mutex_, t)
	{
	}

	~MutexLocker() LIBCAMERA_TSA_RELEASE()
	{
	}

	void lock() LIBCAMERA_TSA_ACQUIRE()
	{
		lock_.lock();
	}

	bool try_lock() LIBCAMERA_TSA_TRY_ACQUIRE(true)
	{
		return lock_.try_lock();
	}

	void unlock() LIBCAMERA_TSA_RELEASE()
	{
		lock_.unlock();
	}

private:
	/* ConditionVariable::wait() needs the underlying unique_lock. */
	friend class ConditionVariable;

	std::unique_lock<std::mutex> lock_;
};
/* std::condition_variable wrapper operating on MutexLocker. */
class ConditionVariable final
{
public:
	ConditionVariable()
	{
	}

	void notify_one() noexcept
	{
		cv_.notify_one();
	}

	void notify_all() noexcept
	{
		cv_.notify_all();
	}

	/* Wait until \a stopWaiting returns true; \a locker must be locked. */
	template<class Predicate>
	void wait(MutexLocker &locker, Predicate stopWaiting)
	{
		cv_.wait(locker.lock_, stopWaiting);
	}

	/*
	 * As wait(), but gives up after \a relTime; returns the predicate's
	 * final value (false on timeout with the predicate unsatisfied).
	 */
	template<class Rep, class Period, class Predicate>
	bool wait_for(MutexLocker &locker,
		      const std::chrono::duration<Rep, Period> &relTime,
		      Predicate stopWaiting)
	{
		return cv_.wait_for(locker.lock_, relTime, stopWaiting);
	}

private:
	std::condition_variable cv_;
};

#else /* __DOXYGEN__ */
/* Documentation-only stubs; the real definitions are above. */
class Mutex final
{
};

class MutexLocker final
{
};

class ConditionVariable final
{
};
#endif /* __DOXYGEN__ */
} /* namespace libcamera */

View File

@ -0,0 +1,71 @@
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2019, Google Inc.
*
* Base object
*/
#pragma once
#include <list>
#include <memory>
#include <vector>
#include <libcamera/base/bound_method.h>
namespace libcamera {
class Message;
template<typename... Args>
class Signal;
class SignalBase;
class Thread;

/*
 * \brief Base class for thread-aware objects
 *
 * An Object belongs to a Thread, can receive posted Messages, can have
 * methods invoked across threads through invokeMethod(), and takes part
 * in parent/child relationships and signal/slot connections.
 */
class Object
{
public:
	Object(Object *parent = nullptr);
	virtual ~Object();

	/* Schedule deferred deletion (cf. Message::DeferredDelete). */
	void deleteLater();

	void postMessage(std::unique_ptr<Message> msg);

	/*
	 * Invoke member function \a func on this object with \a args using
	 * delivery mode \a type; only enabled when T derives from Object.
	 */
	template<typename T, typename R, typename... FuncArgs, typename... Args,
		 std::enable_if_t<std::is_base_of<Object, T>::value> * = nullptr>
	R invokeMethod(R (T::*func)(FuncArgs...), ConnectionType type,
		       Args&&... args)
	{
		T *obj = static_cast<T *>(this);
		auto *method = new BoundMethodMember<T, R, FuncArgs...>(obj, this, func, type);
		return method->activate(args..., true);
	}

	Thread *thread() const { return thread_; }
	void moveToThread(Thread *thread);

	Object *parent() const { return parent_; }

protected:
	virtual void message(Message *msg);
	bool assertThreadBound(const char *message);

private:
	friend class SignalBase;
	friend class Thread;

	void notifyThreadMove();

	/* Track signals connected to this object (for cleanup/disconnect). */
	void connect(SignalBase *signal);
	void disconnect(SignalBase *signal);

	Object *parent_;
	std::vector<Object *> children_;

	Thread *thread_;
	std::list<SignalBase *> signals_;
	unsigned int pendingMessages_;
};
} /* namespace libcamera */

View File

@ -0,0 +1,22 @@
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2021, Google Inc.
*
* Private Header Validation
*
* A selection of internal libcamera headers are installed as part
* of the libcamera package to allow sharing of a select subset of
 * internal functionality with IPA modules only.
*
* This functionality is not considered part of the public libcamera
* API, and can therefore potentially face ABI instabilities which
* should not be exposed to applications. IPA modules however should be
* versioned and more closely matched to the libcamera installation.
*
 * Components that include this file cannot be included in any file
 * that forms part of the libcamera API.
*/
/* Fail the build when a public API header pulls in private functionality. */
#ifndef LIBCAMERA_BASE_PRIVATE
#error "Private headers must not be included in the libcamera API"
#endif

View File

@ -0,0 +1,32 @@
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2019, Google Inc.
*
* General-purpose counting semaphore
*/
#pragma once
#include <libcamera/base/private.h>
#include <libcamera/base/mutex.h>
namespace libcamera {
/*
 * Counting semaphore built on Mutex/ConditionVariable, with thread
 * safety analysis annotations. Member contracts follow the usual
 * counting-semaphore semantics — see semaphore.cpp for details.
 */
class Semaphore
{
public:
	/* Construct with \a n resources initially available. */
	Semaphore(unsigned int n = 0);

	unsigned int available() LIBCAMERA_TSA_EXCLUDES(mutex_);
	void acquire(unsigned int n = 1) LIBCAMERA_TSA_EXCLUDES(mutex_);
	bool tryAcquire(unsigned int n = 1) LIBCAMERA_TSA_EXCLUDES(mutex_);
	void release(unsigned int n = 1) LIBCAMERA_TSA_EXCLUDES(mutex_);

private:
	Mutex mutex_;
	ConditionVariable cv_;
	unsigned int available_ LIBCAMERA_TSA_GUARDED_BY(mutex_);
};
} /* namespace libcamera */

View File

@ -0,0 +1,59 @@
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2019, Google Inc.
*
* File descriptor wrapper with shared ownership
*/
#pragma once
#include <memory>
namespace libcamera {
class UniqueFD;

/*
 * File descriptor wrapper with shared ownership: copies share the same
 * underlying Descriptor, released with the last reference.
 *
 * NOTE(review): the const-lvalue int constructor presumably duplicates
 * the fd while the rvalue one takes ownership (cf. Descriptor's
 * 'duplicate' flag) — confirm in shared_fd.cpp.
 */
class SharedFD final
{
public:
	explicit SharedFD(const int &fd = -1);
	explicit SharedFD(int &&fd);
	explicit SharedFD(UniqueFD fd);
	SharedFD(const SharedFD &other);
	SharedFD(SharedFD &&other);
	~SharedFD();

	SharedFD &operator=(const SharedFD &other);
	SharedFD &operator=(SharedFD &&other);

	bool isValid() const { return fd_ != nullptr; }
	/* Raw fd value, or -1 when no descriptor is held. */
	int get() const { return fd_ ? fd_->fd() : -1; }
	/* Return an independently-owned duplicate of the fd. */
	UniqueFD dup() const;

private:
	/* Shared holder of the actual file descriptor. */
	class Descriptor
	{
	public:
		Descriptor(int fd, bool duplicate);
		~Descriptor();

		int fd() const { return fd_; }

	private:
		int fd_;
	};

	std::shared_ptr<Descriptor> fd_;
};
/* Two SharedFD instances compare equal when they wrap the same fd value. */
static inline bool operator==(const SharedFD &lhs, const SharedFD &rhs)
{
	return lhs.get() == rhs.get();
}

/* Negation of the equality above: differing raw fd values. */
static inline bool operator!=(const SharedFD &lhs, const SharedFD &rhs)
{
	return lhs.get() != rhs.get();
}
} /* namespace libcamera */

View File

@ -0,0 +1,158 @@
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2019, Google Inc.
*
* Signal & slot implementation
*/
#pragma once
#include <functional>
#include <list>
#include <type_traits>
#include <vector>
#include <libcamera/base/bound_method.h>
namespace libcamera {
class Object;

/*
 * Type-erased base of Signal<>: owns the list of connected slots and
 * the generic connect/disconnect machinery.
 */
class SignalBase
{
public:
	/* Remove every slot bound to \a object. */
	void disconnect(Object *object);

protected:
	using SlotList = std::list<BoundMethodBase *>;

	void connect(BoundMethodBase *slot);
	/* Remove every slot for which \a match returns true. */
	void disconnect(std::function<bool(SlotList::iterator &)> match);

	SlotList slots();

private:
	SlotList slots_;
};
/*
 * \brief Typed signal delivering arguments \a Args to connected slots
 *
 * Slots may be Object member functions (the Object overloads pass the
 * receiver and a ConnectionType), plain member functions, functors or
 * free functions. All slots are disconnected on destruction.
 */
template<typename... Args>
class Signal : public SignalBase
{
public:
	~Signal()
	{
		disconnect();
	}

#ifndef __DOXYGEN__
	/* Member-function slot on an Object-derived receiver. */
	template<typename T, typename R, std::enable_if_t<std::is_base_of<Object, T>::value> * = nullptr>
	void connect(T *obj, R (T::*func)(Args...),
		     ConnectionType type = ConnectionTypeAuto)
	{
		Object *object = static_cast<Object *>(obj);
		SignalBase::connect(new BoundMethodMember<T, R, Args...>(obj, object, func, type));
	}

	template<typename T, typename R, std::enable_if_t<!std::is_base_of<Object, T>::value> * = nullptr>
#else
	template<typename T, typename R>
#endif
	/* Member-function slot on a non-Object receiver (direct call). */
	void connect(T *obj, R (T::*func)(Args...))
	{
		SignalBase::connect(new BoundMethodMember<T, R, Args...>(obj, nullptr, func));
	}

#ifndef __DOXYGEN__
	/* Functor slot bound to an Object-derived receiver. */
	template<typename T, typename Func,
		 std::enable_if_t<std::is_base_of<Object, T>::value
#if __cplusplus >= 201703L
				  && std::is_invocable_v<Func, Args...>
#endif
				  > * = nullptr>
	void connect(T *obj, Func func, ConnectionType type = ConnectionTypeAuto)
	{
		Object *object = static_cast<Object *>(obj);
		SignalBase::connect(new BoundMethodFunctor<T, void, Func, Args...>(obj, object, func, type));
	}

	template<typename T, typename Func,
		 std::enable_if_t<!std::is_base_of<Object, T>::value
#if __cplusplus >= 201703L
				  && std::is_invocable_v<Func, Args...>
#endif
				  > * = nullptr>
#else
	template<typename T, typename Func>
#endif
	/* Functor slot bound to a non-Object receiver. */
	void connect(T *obj, Func func)
	{
		SignalBase::connect(new BoundMethodFunctor<T, void, Func, Args...>(obj, nullptr, func));
	}

	/* Free-function slot. */
	template<typename R>
	void connect(R (*func)(Args...))
	{
		SignalBase::connect(new BoundMethodStatic<R, Args...>(func));
	}

	/* Disconnect all slots. */
	void disconnect()
	{
		SignalBase::disconnect([]([[maybe_unused]] SlotList::iterator &iter) {
			return true;
		});
	}

	/* Disconnect every slot bound to \a obj. */
	template<typename T>
	void disconnect(T *obj)
	{
		SignalBase::disconnect([obj](SlotList::iterator &iter) {
			return (*iter)->match(obj);
		});
	}

	/* Disconnect the specific member-function slot \a func of \a obj. */
	template<typename T, typename R>
	void disconnect(T *obj, R (T::*func)(Args...))
	{
		SignalBase::disconnect([obj, func](SlotList::iterator &iter) {
			BoundMethodArgs<R, Args...> *slot =
				static_cast<BoundMethodArgs<R, Args...> *>(*iter);

			if (!slot->match(obj))
				return false;

			/*
			 * If the object matches the slot, the slot is
			 * guaranteed to be a member slot, so we can safely
			 * cast it to BoundMethodMember<T, Args...> to match
			 * func.
			 */
			return static_cast<BoundMethodMember<T, R, Args...> *>(slot)->match(func);
		});
	}

	/* Disconnect the specific free-function slot \a func. */
	template<typename R>
	void disconnect(R (*func)(Args...))
	{
		SignalBase::disconnect([func](SlotList::iterator &iter) {
			BoundMethodArgs<R, Args...> *slot =
				static_cast<BoundMethodArgs<R, Args...> *>(*iter);

			if (!slot->match(nullptr))
				return false;

			return static_cast<BoundMethodStatic<R, Args...> *>(slot)->match(func);
		});
	}

	/* Activate every connected slot with \a args. */
	void emit(Args... args)
	{
		/*
		 * Make a copy of the slots list as the slot could call the
		 * disconnect operation, invalidating the iterator.
		 */
		for (BoundMethodBase *slot : slots())
			static_cast<BoundMethodArgs<void, Args...> *>(slot)->activate(args...);
	}
};
} /* namespace libcamera */

View File

@ -0,0 +1,421 @@
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2020, Google Inc.
*
* C++20 std::span<> implementation for C++11
*/
#pragma once
#include <array>
#include <iterator>
#include <limits>
#include <stddef.h>
#include <type_traits>
namespace libcamera {
/* Sentinel extent: the span's size is only known at run time. */
static constexpr std::size_t dynamic_extent = std::numeric_limits<std::size_t>::max();

template<typename T, std::size_t Extent = dynamic_extent>
class Span;

namespace details {

/* Trait matching std::array specializations. */
template<typename U>
struct is_array : public std::false_type {
};

template<typename U, std::size_t N>
struct is_array<std::array<U, N>> : public std::true_type {
};

/* Trait matching Span specializations. */
template<typename U>
struct is_span : public std::false_type {
};

template<typename U, std::size_t Extent>
struct is_span<Span<U, Extent>> : public std::true_type {
};

} /* namespace details */

namespace utils {

/* Pre-C++17 stand-ins for std::size() / std::data(). */
template<typename C>
constexpr auto size(const C &c) -> decltype(c.size())
{
	return c.size();
}

template<typename C>
constexpr auto data(const C &c) -> decltype(c.data())
{
	return c.data();
}

template<typename C>
constexpr auto data(C &c) -> decltype(c.data())
{
	return c.data();
}

template<class T, std::size_t N>
constexpr T *data(T (&array)[N]) noexcept
{
	return array;
}

/* Tuple protocol support for fixed-extent spans. */
template<std::size_t I, typename T>
struct tuple_element;

template<std::size_t I, typename T, std::size_t N>
struct tuple_element<I, Span<T, N>> {
	using type = T;
};

template<typename T>
struct tuple_size;

template<typename T, std::size_t N>
struct tuple_size<Span<T, N>> : public std::integral_constant<std::size_t, N> {
};

/* Deliberately left undefined: a dynamic span has no static size. */
template<typename T>
struct tuple_size<Span<T, dynamic_extent>>;

} /* namespace utils */

/*
 * \brief Fixed-extent span: a non-owning view over \a Extent contiguous
 * objects of type \a T
 *
 * Only the data pointer is stored; the size is the compile-time constant
 * \a Extent. Mirrors the C++20 std::span interface.
 */
template<typename T, std::size_t Extent>
class Span
{
public:
	using element_type = T;
	using value_type = typename std::remove_cv_t<T>;
	using size_type = std::size_t;
	using difference_type = std::ptrdiff_t;
	using pointer = T *;
	using const_pointer = const T *;
	using reference = T &;
	using const_reference = const T &;
	using iterator = pointer;
	using const_iterator = const_pointer;
	using reverse_iterator = std::reverse_iterator<iterator>;
	using const_reverse_iterator = std::reverse_iterator<const_iterator>;

	static constexpr std::size_t extent = Extent;

	/* Default construction is only available when Extent == 0. */
	template<bool Dependent = false,
		 typename = std::enable_if_t<Dependent || Extent == 0>>
	constexpr Span() noexcept
		: data_(nullptr)
	{
	}

	/* \a count is trusted to equal Extent, as in std::span. */
	explicit constexpr Span(pointer ptr, [[maybe_unused]] size_type count)
		: data_(ptr)
	{
	}

	/* \a last is trusted to be \a first + Extent. */
	explicit constexpr Span(pointer first, [[maybe_unused]] pointer last)
		: data_(first)
	{
	}

	template<std::size_t N>
	constexpr Span(element_type (&arr)[N],
		       std::enable_if_t<std::is_convertible<std::remove_pointer_t<decltype(utils::data(arr))> (*)[],
							    element_type (*)[]>::value &&
					N == Extent,
					std::nullptr_t> = nullptr) noexcept
		: data_(arr)
	{
	}

	template<std::size_t N>
	constexpr Span(std::array<value_type, N> &arr,
		       std::enable_if_t<std::is_convertible<std::remove_pointer_t<decltype(utils::data(arr))> (*)[],
							    element_type (*)[]>::value &&
					N == Extent,
					std::nullptr_t> = nullptr) noexcept
		: data_(arr.data())
	{
	}

	template<std::size_t N>
	constexpr Span(const std::array<value_type, N> &arr,
		       std::enable_if_t<std::is_convertible<std::remove_pointer_t<decltype(utils::data(arr))> (*)[],
							    element_type (*)[]>::value &&
					N == Extent,
					std::nullptr_t> = nullptr) noexcept
		: data_(arr.data())
	{
	}

	template<class Container>
	explicit constexpr Span(Container &cont,
				std::enable_if_t<!details::is_span<Container>::value &&
						 !details::is_array<Container>::value &&
						 !std::is_array<Container>::value &&
						 std::is_convertible<std::remove_pointer_t<decltype(utils::data(cont))> (*)[],
								     element_type (*)[]>::value,
						 std::nullptr_t> = nullptr)
		: data_(utils::data(cont))
	{
	}

	/*
	 * The container is trusted to hold exactly Extent elements; this
	 * cannot be validated at compile time for run-time-sized containers.
	 * (A previous static_assert on utils::size(cont) was ill-formed and
	 * made every instantiation of this constructor fail to compile: a
	 * function parameter is never a constant expression.)
	 */
	template<class Container>
	explicit constexpr Span(const Container &cont,
				std::enable_if_t<!details::is_span<Container>::value &&
						 !details::is_array<Container>::value &&
						 !std::is_array<Container>::value &&
						 std::is_convertible<std::remove_pointer_t<decltype(utils::data(cont))> (*)[],
								     element_type (*)[]>::value,
						 std::nullptr_t> = nullptr)
		: data_(utils::data(cont))
	{
	}

	/* Conversion from a Span with compatible element type and extent. */
	template<class U, std::size_t N>
	explicit constexpr Span(const Span<U, N> &s,
				std::enable_if_t<std::is_convertible<U (*)[], element_type (*)[]>::value &&
						 N == Extent,
						 std::nullptr_t> = nullptr) noexcept
		: data_(s.data())
	{
	}

	constexpr Span(const Span &other) noexcept = default;
	constexpr Span &operator=(const Span &other) noexcept = default;

	constexpr iterator begin() const { return data(); }
	constexpr const_iterator cbegin() const { return begin(); }
	constexpr iterator end() const { return data() + size(); }
	constexpr const_iterator cend() const { return end(); }
	constexpr reverse_iterator rbegin() const { return reverse_iterator(end()); }
	constexpr const_reverse_iterator crbegin() const { return rbegin(); }
	constexpr reverse_iterator rend() const { return reverse_iterator(begin()); }
	constexpr const_reverse_iterator crend() const { return rend(); }

	constexpr reference front() const { return *data(); }
	constexpr reference back() const { return *(data() + size() - 1); }
	constexpr reference operator[](size_type idx) const { return data()[idx]; }
	constexpr pointer data() const noexcept { return data_; }

	constexpr size_type size() const noexcept { return Extent; }
	constexpr size_type size_bytes() const noexcept { return size() * sizeof(element_type); }
	constexpr bool empty() const noexcept { return size() == 0; }

	/* Fixed-size view of the first Count elements. */
	template<std::size_t Count>
	constexpr Span<element_type, Count> first() const
	{
		static_assert(Count <= Extent, "Count larger than size");

		return Span<element_type, Count>{ data(), Count };
	}

	constexpr Span<element_type, dynamic_extent> first(std::size_t Count) const
	{
		return Span<element_type, dynamic_extent>{ data(), Count };
	}

	/* Fixed-size view of the last Count elements. */
	template<std::size_t Count>
	constexpr Span<element_type, Count> last() const
	{
		static_assert(Count <= Extent, "Count larger than size");

		return Span<element_type, Count>{ data() + size() - Count, Count };
	}

	constexpr Span<element_type, dynamic_extent> last(std::size_t Count) const
	{
		return Span<element_type, dynamic_extent>{ data() + size() - Count, Count };
	}

	/* View of Count elements (or the remainder) starting at Offset. */
	template<std::size_t Offset, std::size_t Count = dynamic_extent>
	constexpr Span<element_type, Count != dynamic_extent ? Count : Extent - Offset> subspan() const
	{
		static_assert(Offset <= Extent, "Offset larger than size");
		static_assert(Count == dynamic_extent || Count + Offset <= Extent,
			      "Offset + Count larger than size");

		return Span<element_type, Count != dynamic_extent ? Count : Extent - Offset>{
			data() + Offset,
			Count == dynamic_extent ? size() - Offset : Count
		};
	}

	constexpr Span<element_type, dynamic_extent>
	subspan(std::size_t Offset, std::size_t Count = dynamic_extent) const
	{
		return Span<element_type, dynamic_extent>{
			data() + Offset,
			Count == dynamic_extent ? size() - Offset : Count
		};
	}

private:
	pointer data_;
};
/*
 * \brief Dynamic-extent span: a non-owning view over a contiguous
 * sequence whose length is only known at run time
 *
 * Stores a pointer and an element count. Mirrors the C++20 std::span
 * interface.
 */
template<typename T>
class Span<T, dynamic_extent>
{
public:
	using element_type = T;
	using value_type = typename std::remove_cv_t<T>;
	using size_type = std::size_t;
	using difference_type = std::ptrdiff_t;
	using pointer = T *;
	using const_pointer = const T *;
	using reference = T &;
	using const_reference = const T &;
	using iterator = T *;
	using const_iterator = const T *;
	using reverse_iterator = std::reverse_iterator<iterator>;
	using const_reverse_iterator = std::reverse_iterator<const_iterator>;

	static constexpr std::size_t extent = dynamic_extent;

	constexpr Span() noexcept
		: data_(nullptr), size_(0)
	{
	}

	constexpr Span(pointer ptr, size_type count)
		: data_(ptr), size_(count)
	{
	}

	constexpr Span(pointer first, pointer last)
		: data_(first), size_(last - first)
	{
	}

	template<std::size_t N>
	constexpr Span(element_type (&arr)[N],
		       std::enable_if_t<std::is_convertible<std::remove_pointer_t<decltype(utils::data(arr))> (*)[],
							    element_type (*)[]>::value,
					std::nullptr_t> = nullptr) noexcept
		: data_(arr), size_(N)
	{
	}

	template<std::size_t N>
	constexpr Span(std::array<value_type, N> &arr,
		       std::enable_if_t<std::is_convertible<std::remove_pointer_t<decltype(utils::data(arr))> (*)[],
							    element_type (*)[]>::value,
					std::nullptr_t> = nullptr) noexcept
		: data_(utils::data(arr)), size_(N)
	{
	}

	template<std::size_t N>
	constexpr Span(const std::array<value_type, N> &arr) noexcept
		: data_(utils::data(arr)), size_(N)
	{
	}

	/* Any contiguous container that is neither an array nor a Span. */
	template<class Container>
	constexpr Span(Container &cont,
		       std::enable_if_t<!details::is_span<Container>::value &&
					!details::is_array<Container>::value &&
					!std::is_array<Container>::value &&
					std::is_convertible<std::remove_pointer_t<decltype(utils::data(cont))> (*)[],
							    element_type (*)[]>::value,
					std::nullptr_t> = nullptr)
		: data_(utils::data(cont)), size_(utils::size(cont))
	{
	}

	template<class Container>
	constexpr Span(const Container &cont,
		       std::enable_if_t<!details::is_span<Container>::value &&
					!details::is_array<Container>::value &&
					!std::is_array<Container>::value &&
					std::is_convertible<std::remove_pointer_t<decltype(utils::data(cont))> (*)[],
							    element_type (*)[]>::value,
					std::nullptr_t> = nullptr)
		: data_(utils::data(cont)), size_(utils::size(cont))
	{
	}

	/* Conversion from any Span with a compatible element type. */
	template<class U, std::size_t N>
	constexpr Span(const Span<U, N> &s,
		       std::enable_if_t<std::is_convertible<U (*)[], element_type (*)[]>::value,
					std::nullptr_t> = nullptr) noexcept
		: data_(s.data()), size_(s.size())
	{
	}

	constexpr Span(const Span &other) noexcept = default;

	constexpr Span &operator=(const Span &other) noexcept
	{
		data_ = other.data_;
		size_ = other.size_;
		return *this;
	}

	constexpr iterator begin() const { return data(); }
	constexpr const_iterator cbegin() const { return begin(); }
	constexpr iterator end() const { return data() + size(); }
	constexpr const_iterator cend() const { return end(); }
	constexpr reverse_iterator rbegin() const { return reverse_iterator(end()); }
	constexpr const_reverse_iterator crbegin() const { return rbegin(); }
	constexpr reverse_iterator rend() const { return reverse_iterator(begin()); }
	constexpr const_reverse_iterator crend() const { return rend(); }

	constexpr reference front() const { return *data(); }
	constexpr reference back() const { return *(data() + size() - 1); }
	constexpr reference operator[](size_type idx) const { return data()[idx]; }
	constexpr pointer data() const noexcept { return data_; }

	constexpr size_type size() const noexcept { return size_; }
	constexpr size_type size_bytes() const noexcept { return size() * sizeof(element_type); }
	constexpr bool empty() const noexcept { return size() == 0; }

	/* View of the first Count elements (caller ensures Count <= size()). */
	template<std::size_t Count>
	constexpr Span<element_type, Count> first() const
	{
		return Span<element_type, Count>{ data(), Count };
	}

	constexpr Span<element_type, dynamic_extent> first(std::size_t Count) const
	{
		return { data(), Count };
	}

	/* View of the last Count elements (caller ensures Count <= size()). */
	template<std::size_t Count>
	constexpr Span<element_type, Count> last() const
	{
		return Span<element_type, Count>{ data() + size() - Count, Count };
	}

	constexpr Span<element_type, dynamic_extent> last(std::size_t Count) const
	{
		return Span<element_type, dynamic_extent>{ data() + size() - Count, Count };
	}

	/* View of Count elements (or the remainder) starting at Offset. */
	template<std::size_t Offset, std::size_t Count = dynamic_extent>
	constexpr Span<element_type, Count> subspan() const
	{
		return Span<element_type, Count>{
			data() + Offset,
			Count == dynamic_extent ? size() - Offset : Count
		};
	}

	constexpr Span<element_type, dynamic_extent>
	subspan(std::size_t Offset, std::size_t Count = dynamic_extent) const
	{
		return Span<element_type, dynamic_extent>{
			data() + Offset,
			Count == dynamic_extent ? size() - Offset : Count
		};
	}

private:
	pointer data_;
	size_type size_;
};
} /* namespace libcamera */

Some files were not shown because too many files have changed in this diff Show More