Diffstat (limited to 'src')
-rw-r--r--  src/cam/capture-script.yaml | 29
-rw-r--r--  src/cam/capture_script.cpp | 229
-rw-r--r--  src/cam/capture_script.h | 10
-rw-r--r--  src/cam/drm.cpp | 35
-rw-r--r--  src/cam/stream_options.cpp | 7
-rw-r--r--  src/gstreamer/gstlibcamera-utils.cpp | 185
-rw-r--r--  src/gstreamer/meson.build | 2
-rw-r--r--  src/ipa/ipu3/algorithms/af.cpp | 34
-rw-r--r--  src/ipa/ipu3/algorithms/af.h | 9
-rw-r--r--  src/ipa/ipu3/algorithms/agc.cpp | 10
-rw-r--r--  src/ipa/ipu3/algorithms/agc.h | 5
-rw-r--r--  src/ipa/ipu3/algorithms/awb.cpp | 8
-rw-r--r--  src/ipa/ipu3/algorithms/awb.h | 7
-rw-r--r--  src/ipa/ipu3/algorithms/blc.cpp | 10
-rw-r--r--  src/ipa/ipu3/algorithms/blc.h | 4
-rw-r--r--  src/ipa/ipu3/algorithms/tone_mapping.cpp | 18
-rw-r--r--  src/ipa/ipu3/algorithms/tone_mapping.h | 6
-rw-r--r--  src/ipa/ipu3/ipa_context.cpp | 37
-rw-r--r--  src/ipa/ipu3/ipa_context.h | 18
-rw-r--r--  src/ipa/ipu3/ipu3.cpp | 42
-rw-r--r--  src/ipa/libipa/algorithm.cpp | 4
-rw-r--r--  src/ipa/libipa/algorithm.h | 7
-rw-r--r--  src/ipa/libipa/fc_queue.cpp | 140
-rw-r--r--  src/ipa/libipa/fc_queue.h | 118
-rw-r--r--  src/ipa/libipa/meson.build | 2
-rw-r--r--  src/ipa/meson.build | 8
-rw-r--r--  src/ipa/raspberrypi/raspberrypi.cpp | 1
-rw-r--r--  src/ipa/rkisp1/algorithms/agc.cpp | 46
-rw-r--r--  src/ipa/rkisp1/algorithms/agc.h | 10
-rw-r--r--  src/ipa/rkisp1/algorithms/awb.cpp | 285
-rw-r--r--  src/ipa/rkisp1/algorithms/awb.h | 14
-rw-r--r--  src/ipa/rkisp1/algorithms/blc.cpp | 6
-rw-r--r--  src/ipa/rkisp1/algorithms/blc.h | 4
-rw-r--r--  src/ipa/rkisp1/algorithms/cproc.cpp | 52
-rw-r--r--  src/ipa/rkisp1/algorithms/cproc.h | 5
-rw-r--r--  src/ipa/rkisp1/algorithms/dpcc.cpp | 6
-rw-r--r--  src/ipa/rkisp1/algorithms/dpcc.h | 4
-rw-r--r--  src/ipa/rkisp1/algorithms/dpf.cpp | 265
-rw-r--r--  src/ipa/rkisp1/algorithms/dpf.h | 39
-rw-r--r--  src/ipa/rkisp1/algorithms/filter.cpp | 49
-rw-r--r--  src/ipa/rkisp1/algorithms/filter.h | 5
-rw-r--r--  src/ipa/rkisp1/algorithms/gsl.cpp | 6
-rw-r--r--  src/ipa/rkisp1/algorithms/gsl.h | 4
-rw-r--r--  src/ipa/rkisp1/algorithms/lsc.cpp | 17
-rw-r--r--  src/ipa/rkisp1/algorithms/lsc.h | 5
-rw-r--r--  src/ipa/rkisp1/algorithms/meson.build | 1
-rw-r--r--  src/ipa/rkisp1/data/ov5640.yaml | 15
-rw-r--r--  src/ipa/rkisp1/ipa_context.cpp | 242
-rw-r--r--  src/ipa/rkisp1/ipa_context.h | 67
-rw-r--r--  src/ipa/rkisp1/rkisp1.cpp | 61
-rw-r--r--  src/libcamera/base/meson.build | 4
-rw-r--r--  src/libcamera/camera.cpp | 55
-rw-r--r--  src/libcamera/camera_manager.cpp | 2
-rw-r--r--  src/libcamera/camera_sensor.cpp | 4
-rw-r--r--  src/libcamera/color_space.cpp | 427
-rw-r--r--  src/libcamera/control_serializer.cpp | 28
-rw-r--r--  src/libcamera/device_enumerator.cpp | 2
-rw-r--r--  src/libcamera/ipa_manager.cpp | 2
-rw-r--r--  src/libcamera/media_device.cpp | 16
-rw-r--r--  src/libcamera/meson.build | 2
-rw-r--r--  src/libcamera/pipeline/ipu3/ipu3.cpp | 22
-rw-r--r--  src/libcamera/pipeline/raspberrypi/raspberrypi.cpp | 43
-rw-r--r--  src/libcamera/pipeline/rkisp1/rkisp1.cpp | 39
-rw-r--r--  src/libcamera/pipeline/uvcvideo/uvcvideo.cpp | 211
-rw-r--r--  src/libcamera/pipeline/vimc/vimc.cpp | 2
-rw-r--r--  src/libcamera/pipeline_handler.cpp | 65
-rw-r--r--  src/libcamera/request.cpp | 9
-rw-r--r--  src/libcamera/v4l2_device.cpp | 51
-rw-r--r--  src/libcamera/v4l2_subdevice.cpp | 192
-rw-r--r--  src/libcamera/v4l2_videodevice.cpp | 21
-rw-r--r--  src/libcamera/yaml_parser.cpp | 150
-rwxr-xr-x  src/py/cam/cam.py | 8
-rwxr-xr-x  src/py/examples/simple-cam.py | 5
-rwxr-xr-x  src/py/examples/simple-capture.py | 12
-rwxr-xr-x  src/py/examples/simple-continuous-capture.py | 5
-rw-r--r--  src/py/libcamera/meson.build | 5
-rw-r--r--  src/py/libcamera/py_camera_manager.cpp | 131
-rw-r--r--  src/py/libcamera/py_camera_manager.h | 45
-rw-r--r--  src/py/libcamera/py_helpers.cpp | 97
-rw-r--r--  src/py/libcamera/py_helpers.h | 13
-rw-r--r--  src/py/libcamera/py_main.cpp | 205
-rw-r--r--  src/py/libcamera/py_main.h | 14
-rw-r--r--  src/qcam/assets/shader/YUV_2_planes.frag | 29
-rw-r--r--  src/qcam/assets/shader/YUV_3_planes.frag | 27
-rw-r--r--  src/qcam/assets/shader/YUV_packed.frag | 17
-rw-r--r--  src/qcam/cam_select_dialog.cpp | 111
-rw-r--r--  src/qcam/cam_select_dialog.h | 47
-rw-r--r--  src/qcam/main_window.cpp | 82
-rw-r--r--  src/qcam/main_window.h | 12
-rw-r--r--  src/qcam/meson.build | 2
-rw-r--r--  src/qcam/viewfinder.h | 2
-rw-r--r--  src/qcam/viewfinder_gl.cpp | 84
-rw-r--r--  src/qcam/viewfinder_gl.h | 3
-rw-r--r--  src/qcam/viewfinder_qt.cpp | 5
-rw-r--r--  src/qcam/viewfinder_qt.h | 1
95 files changed, 3414 insertions, 1088 deletions
diff --git a/src/cam/capture-script.yaml b/src/cam/capture-script.yaml
index 6a749bc6..7118865e 100644
--- a/src/cam/capture-script.yaml
+++ b/src/cam/capture-script.yaml
@@ -4,6 +4,19 @@
#
# A capture script allows associating a list of controls and their values
# with frame numbers.
+#
+# The script allows defining a list of frames associated with controls
+# and an optional list of properties that can control the script behaviour.
+
+# properties:
+# # Repeat the controls every 'idx' frames.
+# - loop: idx
+#
+# # List of frame numbers, each with an associated list of controls to be applied
+# frames:
+# - frame-number:
+# Control1: value1
+# Control2: value2
# \todo Formally define the capture script structure with a schema
@@ -12,10 +25,16 @@
# libcamera::controls:: enumeration
# - Controls not supported by the camera currently operated are ignored
# - Frame numbers shall be monotonically incrementing, gaps are allowed
+# - If a loop limit is specified, frame numbers in the 'frames' list shall be
+#   less than the loop limit
+
+# Example: Turn brightness up and down every 460 frames
+
+properties:
+ - loop: 460
-# Example:
frames:
- - 1:
+ - 0:
Brightness: 0.0
- 40:
@@ -44,3 +63,9 @@ frames:
- 340:
Brightness: -0.8
+
+ - 380:
+ Brightness: -0.4
+
+ - 420:
+ Brightness: -0.2
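With the loop property above, the applied controls wrap around: the controls
for frame N are those listed for frame N % 460, so frame 460 re-applies the
Brightness 0.0 entry defined for frame 0. A minimal sketch of the lookup that
CaptureScript::frameControls() performs below (the helper name is
illustrative, not part of the patch):

    unsigned int effectiveIndex(unsigned int frame, unsigned int loop)
    {
            /* Without a loop limit, the frame number is used unchanged. */
            return loop ? frame % loop : frame;
    }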
diff --git a/src/cam/capture_script.cpp b/src/cam/capture_script.cpp
index 9f22d5f7..5a27361c 100644
--- a/src/cam/capture_script.cpp
+++ b/src/cam/capture_script.cpp
@@ -15,7 +15,7 @@ using namespace libcamera;
CaptureScript::CaptureScript(std::shared_ptr<Camera> camera,
const std::string &fileName)
- : camera_(camera), valid_(false)
+ : camera_(camera), loop_(0), valid_(false)
{
FILE *fh = fopen(fileName.c_str(), "r");
if (!fh) {
@@ -44,8 +44,13 @@ CaptureScript::CaptureScript(std::shared_ptr<Camera> camera,
const ControlList &CaptureScript::frameControls(unsigned int frame)
{
static ControlList controls{};
+ unsigned int idx = frame;
- auto it = frameControls_.find(frame);
+ /* If we loop, repeat the controls every 'loop_' frames. */
+ if (loop_)
+ idx = frame % loop_;
+
+ auto it = frameControls_.find(idx);
if (it == frameControls_.end())
return controls;
@@ -149,8 +154,14 @@ int CaptureScript::parseScript(FILE *script)
std::string section = eventScalarValue(event);
- if (section == "frames") {
- parseFrames();
+ if (section == "properties") {
+ ret = parseProperties();
+ if (ret)
+ return ret;
+ } else if (section == "frames") {
+ ret = parseFrames();
+ if (ret)
+ return ret;
} else {
std::cerr << "Unsupported section '" << section << "'"
<< std::endl;
@@ -159,6 +170,65 @@ int CaptureScript::parseScript(FILE *script)
}
}
+int CaptureScript::parseProperty()
+{
+ EventPtr event = nextEvent(YAML_MAPPING_START_EVENT);
+ if (!event)
+ return -EINVAL;
+
+ std::string prop = parseScalar();
+ if (prop.empty())
+ return -EINVAL;
+
+ if (prop == "loop") {
+ event = nextEvent();
+ if (!event)
+ return -EINVAL;
+
+ std::string value = eventScalarValue(event);
+ if (value.empty())
+ return -EINVAL;
+
+ loop_ = atoi(value.c_str());
+ if (!loop_) {
+		std::cerr << "Invalid loop limit '" << value << "'"
+ << std::endl;
+ return -EINVAL;
+ }
+ } else {
+ std::cerr << "Unsupported property '" << prop << "'" << std::endl;
+ return -EINVAL;
+ }
+
+ event = nextEvent(YAML_MAPPING_END_EVENT);
+ if (!event)
+ return -EINVAL;
+
+ return 0;
+}
+
+int CaptureScript::parseProperties()
+{
+ EventPtr event = nextEvent(YAML_SEQUENCE_START_EVENT);
+ if (!event)
+ return -EINVAL;
+
+ while (1) {
+ if (event->type == YAML_SEQUENCE_END_EVENT)
+ return 0;
+
+ int ret = parseProperty();
+ if (ret)
+ return ret;
+
+ event = nextEvent();
+ if (!event)
+ return -EINVAL;
+ }
+
+ return 0;
+}
+
int CaptureScript::parseFrames()
{
EventPtr event = nextEvent(YAML_SEQUENCE_START_EVENT);
@@ -189,6 +259,12 @@ int CaptureScript::parseFrame(EventPtr event)
return -EINVAL;
unsigned int frameId = atoi(key.c_str());
+ if (loop_ && frameId >= loop_) {
+ std::cerr
+			<< "Frame id (" << frameId << ") shall be smaller than "
+ << "loop limit (" << loop_ << ")" << std::endl;
+ return -EINVAL;
+ }
event = nextEvent(YAML_MAPPING_START_EVENT);
if (!event)
@@ -232,12 +308,15 @@ int CaptureScript::parseControl(EventPtr event, ControlList &controls)
return -EINVAL;
}
- std::string value = parseScalar();
- if (value.empty())
+ const ControlId *controlId = it->second;
+
+ ControlValue val = unpackControl(controlId);
+ if (val.isNone()) {
+ std::cerr << "Error unpacking control '" << name << "'"
+ << std::endl;
return -EINVAL;
+ }
- const ControlId *controlId = it->second;
- ControlValue val = unpackControl(controlId, value);
controls.set(controlId->id(), val);
return 0;
@@ -252,6 +331,104 @@ std::string CaptureScript::parseScalar()
return eventScalarValue(event);
}
+ControlValue CaptureScript::parseRectangles()
+{
+ std::vector<libcamera::Rectangle> rectangles;
+
+ std::vector<std::vector<std::string>> arrays = parseArrays();
+ if (arrays.empty())
+ return {};
+
+ for (const std::vector<std::string> &values : arrays) {
+ if (values.size() != 4) {
+ std::cerr << "Error parsing Rectangle: expected "
+ << "array with 4 parameters" << std::endl;
+ return {};
+ }
+
+ Rectangle rect = unpackRectangle(values);
+ rectangles.push_back(rect);
+ }
+
+ ControlValue controlValue;
+ controlValue.set(Span<const Rectangle>(rectangles));
+
+ return controlValue;
+}
+
+std::vector<std::vector<std::string>> CaptureScript::parseArrays()
+{
+ EventPtr event = nextEvent(YAML_SEQUENCE_START_EVENT);
+ if (!event)
+ return {};
+
+ event = nextEvent();
+ if (!event)
+ return {};
+
+ std::vector<std::vector<std::string>> valueArrays;
+
+ /* Parse single array. */
+ if (event->type == YAML_SCALAR_EVENT) {
+ std::string firstValue = eventScalarValue(event);
+ if (firstValue.empty())
+ return {};
+
+ std::vector<std::string> remaining = parseSingleArray();
+
+ std::vector<std::string> values = { firstValue };
+ values.insert(std::end(values),
+ std::begin(remaining), std::end(remaining));
+ valueArrays.push_back(values);
+
+ return valueArrays;
+ }
+
+ /* Parse array of arrays. */
+ while (1) {
+ switch (event->type) {
+ case YAML_SEQUENCE_START_EVENT: {
+ std::vector<std::string> values = parseSingleArray();
+ valueArrays.push_back(values);
+ break;
+ }
+ case YAML_SEQUENCE_END_EVENT:
+ return valueArrays;
+ default:
+ return {};
+ }
+
+ event = nextEvent();
+ if (!event)
+ return {};
+ }
+}
+
+std::vector<std::string> CaptureScript::parseSingleArray()
+{
+ std::vector<std::string> values;
+
+ while (1) {
+ EventPtr event = nextEvent();
+ if (!event)
+ return {};
+
+ switch (event->type) {
+ case YAML_SCALAR_EVENT: {
+ std::string value = eventScalarValue(event);
+ if (value.empty())
+ return {};
+ values.push_back(value);
+ break;
+ }
+ case YAML_SEQUENCE_END_EVENT:
+ return values;
+ default:
+ return {};
+ }
+ }
+}
+
void CaptureScript::unpackFailure(const ControlId *id, const std::string &repr)
{
static const std::map<unsigned int, const char *> typeNames = {
@@ -277,9 +454,24 @@ void CaptureScript::unpackFailure(const ControlId *id, const std::string &repr)
<< typeName << " control " << id->name() << std::endl;
}
-ControlValue CaptureScript::unpackControl(const ControlId *id,
- const std::string &repr)
+ControlValue CaptureScript::unpackControl(const ControlId *id)
{
+ /* Parse complex types. */
+ switch (id->type()) {
+ case ControlTypeRectangle:
+ return parseRectangles();
+ case ControlTypeSize:
+ /* \todo Parse Sizes. */
+ return {};
+ default:
+ break;
+ }
+
+ /* Parse basic types represented by a single scalar. */
+ const std::string repr = parseScalar();
+ if (repr.empty())
+ return {};
+
ControlValue value{};
switch (id->type()) {
@@ -324,13 +516,20 @@ ControlValue CaptureScript::unpackControl(const ControlId *id,
value.set<std::string>(repr);
break;
}
- case ControlTypeRectangle:
- /* \todo Parse rectangles. */
- break;
- case ControlTypeSize:
- /* \todo Parse Sizes. */
+ default:
+ std::cerr << "Unsupported control type" << std::endl;
break;
}
return value;
}
+
+libcamera::Rectangle CaptureScript::unpackRectangle(const std::vector<std::string> &strVec)
+{
+ int x = strtol(strVec[0].c_str(), NULL, 10);
+ int y = strtol(strVec[1].c_str(), NULL, 10);
+ unsigned int width = strtoul(strVec[2].c_str(), NULL, 10);
+ unsigned int height = strtoul(strVec[3].c_str(), NULL, 10);
+
+ return Rectangle(x, y, width, height);
+}
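As a usage note for the Rectangle support added above: a Rectangle control is
written in the script as an array of four scalars, or as an array of such
arrays for controls taking several rectangles. A hypothetical snippet
(ScalerCrop stands in for any Rectangle-valued control), with the value that
unpackRectangle() builds from the four parsed strings:

    /*
     * frames:
     *   - 0:
     *       ScalerCrop: [ 0, 0, 640, 480 ]
     */
    libcamera::Rectangle rect(0, 0, 640, 480); /* x, y, width, height */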
diff --git a/src/cam/capture_script.h b/src/cam/capture_script.h
index 8b4f8f62..7a0ddebb 100644
--- a/src/cam/capture_script.h
+++ b/src/cam/capture_script.h
@@ -40,6 +40,7 @@ private:
std::map<unsigned int, libcamera::ControlList> frameControls_;
std::shared_ptr<libcamera::Camera> camera_;
yaml_parser_t parser_;
+ unsigned int loop_;
bool valid_;
EventPtr nextEvent(yaml_event_type_t expectedType = YAML_NO_EVENT);
@@ -49,14 +50,19 @@ private:
int parseScript(FILE *script);
+ int parseProperties();
+ int parseProperty();
int parseFrames();
int parseFrame(EventPtr event);
int parseControl(EventPtr event, libcamera::ControlList &controls);
std::string parseScalar();
+ libcamera::ControlValue parseRectangles();
+ std::vector<std::vector<std::string>> parseArrays();
+ std::vector<std::string> parseSingleArray();
void unpackFailure(const libcamera::ControlId *id,
const std::string &repr);
- libcamera::ControlValue unpackControl(const libcamera::ControlId *id,
- const std::string &repr);
+ libcamera::ControlValue unpackControl(const libcamera::ControlId *id);
+ libcamera::Rectangle unpackRectangle(const std::vector<std::string> &strVec);
};
diff --git a/src/cam/drm.cpp b/src/cam/drm.cpp
index b0602c94..2e4d7985 100644
--- a/src/cam/drm.cpp
+++ b/src/cam/drm.cpp
@@ -430,7 +430,8 @@ int Device::init()
int Device::openCard()
{
const std::string dirName = "/dev/dri/";
- int ret = -ENOENT;
+ bool found = false;
+ int ret;
/*
* Open the first DRM/KMS device beginning with /dev/dri/card. The
@@ -449,24 +450,42 @@ int Device::openCard()
}
for (struct dirent *res; (res = readdir(folder));) {
+ uint64_t cap;
+
if (strncmp(res->d_name, "card", 4))
continue;
const std::string devName = dirName + res->d_name;
fd_ = open(devName.c_str(), O_RDWR | O_CLOEXEC);
- if (fd_ >= 0) {
- ret = 0;
- break;
+ if (fd_ < 0) {
+ ret = -errno;
+ std::cerr << "Failed to open DRM/KMS device " << devName << ": "
+ << strerror(-ret) << std::endl;
+ continue;
}
- ret = -errno;
- std::cerr << "Failed to open DRM/KMS device " << devName << ": "
- << strerror(-ret) << std::endl;
+ /*
+ * Skip devices that don't support the modeset API, to avoid
+ * selecting a DRM device corresponding to a GPU. There is no
+ * modeset capability, but the kernel returns an error for most
+	 * caps if mode setting isn't supported by the driver. The
+	 * DRM_CAP_DUMB_BUFFER capability is one of those, others would
+ * do as well. The capability value itself isn't relevant.
+ */
+ ret = drmGetCap(fd_, DRM_CAP_DUMB_BUFFER, &cap);
+ if (ret < 0) {
+ drmClose(fd_);
+ fd_ = -1;
+ continue;
+ }
+
+ found = true;
+ break;
}
closedir(folder);
- return ret;
+ return found ? 0 : -ENOENT;
}
int Device::getResources()
diff --git a/src/cam/stream_options.cpp b/src/cam/stream_options.cpp
index a68135a9..3a5625f5 100644
--- a/src/cam/stream_options.cpp
+++ b/src/cam/stream_options.cpp
@@ -8,6 +8,8 @@
#include <iostream>
+#include <libcamera/color_space.h>
+
using namespace libcamera;
StreamKeyValueParser::StreamKeyValueParser()
@@ -21,6 +23,8 @@ StreamKeyValueParser::StreamKeyValueParser()
ArgumentRequired);
addOption("pixelformat", OptionString, "Pixel format name",
ArgumentRequired);
+ addOption("colorspace", OptionString, "Color space",
+ ArgumentRequired);
}
KeyValueParser::Options StreamKeyValueParser::parse(const char *arguments)
@@ -96,6 +100,9 @@ int StreamKeyValueParser::updateConfiguration(CameraConfiguration *config,
if (opts.isSet("pixelformat"))
cfg.pixelFormat = PixelFormat::fromString(opts["pixelformat"].toString());
+
+ if (opts.isSet("colorspace"))
+ cfg.colorSpace = ColorSpace::fromString(opts["colorspace"].toString());
}
return 0;
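The new key is parsed with ColorSpace::fromString(), mirroring how the
pixelformat key uses PixelFormat::fromString(). A minimal sketch of the
underlying call (the set of accepted names is whatever fromString()
recognises; "Rec709" is assumed here purely for illustration):

    #include <iostream>
    #include <optional>

    #include <libcamera/color_space.h>

    int main()
    {
            /* Roughly what --stream colorspace=Rec709 resolves to. */
            std::optional<libcamera::ColorSpace> cs =
                    libcamera::ColorSpace::fromString("Rec709");

            if (cs)
                    std::cout << libcamera::ColorSpace::toString(cs) << std::endl;

            return 0;
    }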
diff --git a/src/gstreamer/gstlibcamera-utils.cpp b/src/gstreamer/gstlibcamera-utils.cpp
index c97c0d43..244a4a79 100644
--- a/src/gstreamer/gstlibcamera-utils.cpp
+++ b/src/gstreamer/gstlibcamera-utils.cpp
@@ -19,9 +19,21 @@ static struct {
/* Compressed */
{ GST_VIDEO_FORMAT_ENCODED, formats::MJPEG },
- /* RGB */
+ /* RGB16 */
+ { GST_VIDEO_FORMAT_RGB16, formats::RGB565 },
+
+ /* RGB24 */
{ GST_VIDEO_FORMAT_RGB, formats::BGR888 },
{ GST_VIDEO_FORMAT_BGR, formats::RGB888 },
+
+ /* RGB32 */
+ { GST_VIDEO_FORMAT_BGRx, formats::XRGB8888 },
+ { GST_VIDEO_FORMAT_RGBx, formats::XBGR8888 },
+ { GST_VIDEO_FORMAT_xBGR, formats::RGBX8888 },
+ { GST_VIDEO_FORMAT_xRGB, formats::BGRX8888 },
+ { GST_VIDEO_FORMAT_BGRA, formats::ARGB8888 },
+ { GST_VIDEO_FORMAT_RGBA, formats::ABGR8888 },
+ { GST_VIDEO_FORMAT_ABGR, formats::RGBA8888 },
{ GST_VIDEO_FORMAT_ARGB, formats::BGRA8888 },
/* YUV Semiplanar */
@@ -45,6 +57,154 @@ static struct {
/* \todo NV42 is used in libcamera but is not mapped in GStreamer yet. */
};
+static GstVideoColorimetry
+colorimetry_from_colorspace(const ColorSpace &colorSpace)
+{
+ GstVideoColorimetry colorimetry;
+
+ switch (colorSpace.primaries) {
+ case ColorSpace::Primaries::Raw:
+ colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
+ break;
+ case ColorSpace::Primaries::Smpte170m:
+ colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_SMPTE170M;
+ break;
+ case ColorSpace::Primaries::Rec709:
+ colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_BT709;
+ break;
+ case ColorSpace::Primaries::Rec2020:
+ colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_BT2020;
+ break;
+ }
+
+ switch (colorSpace.transferFunction) {
+ case ColorSpace::TransferFunction::Linear:
+ colorimetry.transfer = GST_VIDEO_TRANSFER_GAMMA10;
+ break;
+ case ColorSpace::TransferFunction::Srgb:
+ colorimetry.transfer = GST_VIDEO_TRANSFER_SRGB;
+ break;
+ case ColorSpace::TransferFunction::Rec709:
+ colorimetry.transfer = GST_VIDEO_TRANSFER_BT709;
+ break;
+ }
+
+ switch (colorSpace.ycbcrEncoding) {
+ case ColorSpace::YcbcrEncoding::None:
+ colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_RGB;
+ break;
+ case ColorSpace::YcbcrEncoding::Rec601:
+ colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
+ break;
+ case ColorSpace::YcbcrEncoding::Rec709:
+ colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT709;
+ break;
+ case ColorSpace::YcbcrEncoding::Rec2020:
+ colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT2020;
+ break;
+ }
+
+ switch (colorSpace.range) {
+ case ColorSpace::Range::Full:
+ colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255;
+ break;
+ case ColorSpace::Range::Limited:
+ colorimetry.range = GST_VIDEO_COLOR_RANGE_16_235;
+ break;
+ }
+
+ return colorimetry;
+}
+
+static std::optional<ColorSpace>
+colorspace_from_colorimetry(const GstVideoColorimetry &colorimetry)
+{
+ std::optional<ColorSpace> colorspace = ColorSpace::Raw;
+
+ switch (colorimetry.primaries) {
+ case GST_VIDEO_COLOR_PRIMARIES_UNKNOWN:
+ /* Unknown primaries map to raw colorspace in gstreamer */
+ return ColorSpace::Raw;
+ case GST_VIDEO_COLOR_PRIMARIES_SMPTE170M:
+ colorspace->primaries = ColorSpace::Primaries::Smpte170m;
+ break;
+ case GST_VIDEO_COLOR_PRIMARIES_BT709:
+ colorspace->primaries = ColorSpace::Primaries::Rec709;
+ break;
+ case GST_VIDEO_COLOR_PRIMARIES_BT2020:
+ colorspace->primaries = ColorSpace::Primaries::Rec2020;
+ break;
+ default:
+ GST_WARNING("Colorimetry primaries %d not mapped in gstlibcamera",
+ colorimetry.primaries);
+ return std::nullopt;
+ }
+
+ switch (colorimetry.transfer) {
+	/* Transfer function mappings inspired by the v4l2src plugin */
+ case GST_VIDEO_TRANSFER_GAMMA18:
+ case GST_VIDEO_TRANSFER_GAMMA20:
+ case GST_VIDEO_TRANSFER_GAMMA22:
+ case GST_VIDEO_TRANSFER_GAMMA28:
+ GST_WARNING("GAMMA 18, 20, 22, 28 transfer functions not supported");
+ /* fallthrough */
+ case GST_VIDEO_TRANSFER_GAMMA10:
+ colorspace->transferFunction = ColorSpace::TransferFunction::Linear;
+ break;
+ case GST_VIDEO_TRANSFER_SRGB:
+ colorspace->transferFunction = ColorSpace::TransferFunction::Srgb;
+ break;
+#if GST_CHECK_VERSION(1, 18, 0)
+ case GST_VIDEO_TRANSFER_BT601:
+ case GST_VIDEO_TRANSFER_BT2020_10:
+#endif
+ case GST_VIDEO_TRANSFER_BT2020_12:
+ case GST_VIDEO_TRANSFER_BT709:
+ colorspace->transferFunction = ColorSpace::TransferFunction::Rec709;
+ break;
+ default:
+ GST_WARNING("Colorimetry transfer function %d not mapped in gstlibcamera",
+ colorimetry.transfer);
+ return std::nullopt;
+ }
+
+ switch (colorimetry.matrix) {
+ case GST_VIDEO_COLOR_MATRIX_RGB:
+ colorspace->ycbcrEncoding = ColorSpace::YcbcrEncoding::None;
+ break;
+	/* FCC is about the same as BT601 with fewer digits of precision */
+ case GST_VIDEO_COLOR_MATRIX_FCC:
+ case GST_VIDEO_COLOR_MATRIX_BT601:
+ colorspace->ycbcrEncoding = ColorSpace::YcbcrEncoding::Rec601;
+ break;
+ case GST_VIDEO_COLOR_MATRIX_BT709:
+ colorspace->ycbcrEncoding = ColorSpace::YcbcrEncoding::Rec709;
+ break;
+ case GST_VIDEO_COLOR_MATRIX_BT2020:
+ colorspace->ycbcrEncoding = ColorSpace::YcbcrEncoding::Rec2020;
+ break;
+ default:
+ GST_WARNING("Colorimetry matrix %d not mapped in gstlibcamera",
+ colorimetry.matrix);
+ return std::nullopt;
+ }
+
+ switch (colorimetry.range) {
+ case GST_VIDEO_COLOR_RANGE_0_255:
+ colorspace->range = ColorSpace::Range::Full;
+ break;
+ case GST_VIDEO_COLOR_RANGE_16_235:
+ colorspace->range = ColorSpace::Range::Limited;
+ break;
+ default:
+ GST_WARNING("Colorimetry range %d not mapped in gstlibcamera",
+ colorimetry.range);
+ return std::nullopt;
+ }
+
+ return colorspace;
+}
+
static GstVideoFormat
pixel_format_to_gst_format(const PixelFormat &format)
{
@@ -139,6 +299,18 @@ gst_libcamera_stream_configuration_to_caps(const StreamConfiguration &stream_cfg
"width", G_TYPE_INT, stream_cfg.size.width,
"height", G_TYPE_INT, stream_cfg.size.height,
nullptr);
+
+ if (stream_cfg.colorSpace) {
+ GstVideoColorimetry colorimetry = colorimetry_from_colorspace(stream_cfg.colorSpace.value());
+ gchar *colorimetry_str = gst_video_colorimetry_to_string(&colorimetry);
+
+ if (colorimetry_str)
+ gst_structure_set(s, "colorimetry", G_TYPE_STRING, colorimetry_str, nullptr);
+ else
+ g_error("Got invalid colorimetry from ColorSpace: %s",
+ ColorSpace::toString(stream_cfg.colorSpace).c_str());
+ }
+
gst_caps_append_structure(caps, s);
return caps;
@@ -222,6 +394,17 @@ gst_libcamera_configure_stream_from_caps(StreamConfiguration &stream_cfg,
gst_structure_get_int(s, "height", &height);
stream_cfg.size.width = width;
stream_cfg.size.height = height;
+
+ /* Configure colorimetry */
+ if (gst_structure_has_field(s, "colorimetry")) {
+ const gchar *colorimetry_str = gst_structure_get_string(s, "colorimetry");
+ GstVideoColorimetry colorimetry;
+
+ if (!gst_video_colorimetry_from_string(&colorimetry, colorimetry_str))
+ g_critical("Invalid colorimetry %s", colorimetry_str);
+
+ stream_cfg.colorSpace = colorspace_from_colorimetry(colorimetry);
+ }
}
#if !GST_CHECK_VERSION(1, 17, 1)
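The two helpers above translate between GstVideoColorimetry and
libcamera::ColorSpace in both directions. A round-trip sketch using the
standard GStreamer API ("bt709" is GStreamer's canonical string for BT.709
colorimetry):

    GstVideoColorimetry colorimetry;

    /* "bt709": BT709 primaries, transfer and matrix, 16-235 range... */
    if (gst_video_colorimetry_from_string(&colorimetry, "bt709")) {
            /* ...which colorspace_from_colorimetry() maps to ColorSpace::Rec709. */
            gchar *str = gst_video_colorimetry_to_string(&colorimetry);
            g_free(str);
    }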
diff --git a/src/gstreamer/meson.build b/src/gstreamer/meson.build
index 77c79140..eda246d7 100644
--- a/src/gstreamer/meson.build
+++ b/src/gstreamer/meson.build
@@ -42,7 +42,7 @@ endif
libcamera_gst = shared_library('gstlibcamera',
libcamera_gst_sources,
cpp_args : libcamera_gst_cpp_args,
- dependencies : [libcamera_public, gstvideo_dep, gstallocator_dep],
+ dependencies : [libcamera_private, gstvideo_dep, gstallocator_dep],
install: true,
install_dir : '@0@/gstreamer-1.0'.format(get_option('libdir')),
)
diff --git a/src/ipa/ipu3/algorithms/af.cpp b/src/ipa/ipu3/algorithms/af.cpp
index 4835a034..b00a34f9 100644
--- a/src/ipa/ipu3/algorithms/af.cpp
+++ b/src/ipa/ipu3/algorithms/af.cpp
@@ -116,7 +116,10 @@ Af::Af()
/**
* \copydoc libcamera::ipa::Algorithm::prepare
*/
-void Af::prepare(IPAContext &context, ipu3_uapi_params *params)
+void Af::prepare(IPAContext &context,
+ [[maybe_unused]] const uint32_t frame,
+ [[maybe_unused]] IPAFrameContext &frameContext,
+ ipu3_uapi_params *params)
{
const struct ipu3_uapi_grid_config &grid = context.configuration.af.afGrid;
params->acc_param.af.grid_cfg = grid;
@@ -196,10 +199,10 @@ int Af::configure(IPAContext &context, const IPAConfigInfo &configInfo)
/**
* \brief AF coarse scan
- *
- * Find a near focused image using a coarse step. The step is determined by coarseSearchStep.
- *
* \param[in] context The shared IPA context
+ *
+ * Find a near focused image using a coarse step. The step is determined by
+ * kCoarseSearchStep.
*/
void Af::afCoarseScan(IPAContext &context)
{
@@ -223,10 +226,9 @@ void Af::afCoarseScan(IPAContext &context)
/**
* \brief AF fine scan
+ * \param[in] context The shared IPA context
*
* Find an optimum lens position with moving 1 step for each search.
- *
- * \param[in] context The shared IPA context
*/
void Af::afFineScan(IPAContext &context)
{
@@ -244,10 +246,9 @@ void Af::afFineScan(IPAContext &context)
/**
* \brief AF reset
+ * \param[in] context The shared IPA context
*
* Reset all the parameters to start over the AF process.
- *
- * \param[in] context The shared IPA context
*/
void Af::afReset(IPAContext &context)
{
@@ -268,7 +269,7 @@ void Af::afReset(IPAContext &context)
/**
* \brief AF variance comparison.
* \param[in] context The IPA context
- * \param min_step The VCM movement step.
+ * \param[in] min_step The VCM movement step.
*
* We always pick the largest variance to replace the previous one. The image
* with a larger variance also indicates it is a clearer image than previous
@@ -343,9 +344,8 @@ void Af::afIgnoreFrameReset()
/**
* \brief Estimate variance
- * \param y_item The AF filter data set from the IPU3 statistics buffer
- * \param len The quantity of table item entries which are valid to process
- * \param isY1 Selects between filter Y1 or Y2 to calculate the variance
+ * \param[in] y_items The AF filter data set from the IPU3 statistics buffer
+ * \param[in] isY1 Selects between filter Y1 or Y2 to calculate the variance
*
* Calculate the mean of the data set provided by \a y_items, and then calculate
* the variance of that data set from the mean.
@@ -378,15 +378,15 @@ double Af::afEstimateVariance(Span<const y_table_item_t> y_items, bool isY1)
/**
* \brief Determine out-of-focus situation.
- * \param context The IPA context.
+ * \param[in] context The IPA context.
*
* Out-of-focus means that the variance change rate for a focused and a new
* variance is greater than a threshold.
*
* \return True if the variance threshold is crossed indicating lost focus,
- * false otherwise.
+ * false otherwise
*/
-bool Af::afIsOutOfFocus(IPAContext context)
+bool Af::afIsOutOfFocus(IPAContext &context)
{
const uint32_t diff_var = std::abs(currentVariance_ -
context.activeState.af.maxVariance);
@@ -406,6 +406,7 @@ bool Af::afIsOutOfFocus(IPAContext context)
/**
* \brief Determine the max contrast image and lens position.
* \param[in] context The IPA context.
+ * \param[in] frame The frame context sequence number
* \param[in] frameContext The current frame context
* \param[in] stats The statistics buffer of IPU3.
*
@@ -420,7 +421,8 @@ bool Af::afIsOutOfFocus(IPAContext context)
*
* [1] Hill Climbing Algorithm, https://en.wikipedia.org/wiki/Hill_climbing
*/
-void Af::process(IPAContext &context, [[maybe_unused]] IPAFrameContext *frameContext,
+void Af::process(IPAContext &context, [[maybe_unused]] const uint32_t frame,
+ [[maybe_unused]] IPAFrameContext &frameContext,
const ipu3_uapi_stats_3a *stats)
{
/* Evaluate the AF buffer length */
diff --git a/src/ipa/ipu3/algorithms/af.h b/src/ipa/ipu3/algorithms/af.h
index ccf015f3..89d37ac1 100644
--- a/src/ipa/ipu3/algorithms/af.h
+++ b/src/ipa/ipu3/algorithms/af.h
@@ -30,9 +30,12 @@ public:
Af();
~Af() = default;
- void prepare(IPAContext &context, ipu3_uapi_params *params) override;
int configure(IPAContext &context, const IPAConfigInfo &configInfo) override;
- void process(IPAContext &context, IPAFrameContext *frameContext,
+ void prepare(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
+ ipu3_uapi_params *params) override;
+ void process(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
const ipu3_uapi_stats_3a *stats) override;
private:
@@ -44,7 +47,7 @@ private:
void afIgnoreFrameReset();
double afEstimateVariance(Span<const y_table_item_t> y_items, bool isY1);
- bool afIsOutOfFocus(IPAContext context);
+ bool afIsOutOfFocus(IPAContext &context);
/* VCM step configuration. It is the current setting of the VCM step. */
uint32_t focus_;
diff --git a/src/ipa/ipu3/algorithms/agc.cpp b/src/ipa/ipu3/algorithms/agc.cpp
index ed4809d9..a1a3c38f 100644
--- a/src/ipa/ipu3/algorithms/agc.cpp
+++ b/src/ipa/ipu3/algorithms/agc.cpp
@@ -183,13 +183,13 @@ utils::Duration Agc::filterExposure(utils::Duration exposureValue)
* \param[in] yGain The gain calculated based on the relative luminance target
* \param[in] iqMeanGain The gain calculated based on the relative luminance target
*/
-void Agc::computeExposure(IPAContext &context, IPAFrameContext *frameContext,
+void Agc::computeExposure(IPAContext &context, IPAFrameContext &frameContext,
double yGain, double iqMeanGain)
{
const IPASessionConfiguration &configuration = context.configuration;
/* Get the effective exposure and gain applied on the sensor. */
- uint32_t exposure = frameContext->sensor.exposure;
- double analogueGain = frameContext->sensor.gain;
+ uint32_t exposure = frameContext.sensor.exposure;
+ double analogueGain = frameContext.sensor.gain;
/* Use the highest of the two gain estimates. */
double evGain = std::max(yGain, iqMeanGain);
@@ -317,13 +317,15 @@ double Agc::estimateLuminance(IPAActiveState &activeState,
/**
* \brief Process IPU3 statistics, and run AGC operations
* \param[in] context The shared IPA context
+ * \param[in] frame The current frame sequence number
* \param[in] frameContext The current frame context
* \param[in] stats The IPU3 statistics and ISP results
*
* Identify the current image brightness, and use that to estimate the optimal
* new exposure and gain for the scene.
*/
-void Agc::process(IPAContext &context, [[maybe_unused]] IPAFrameContext *frameContext,
+void Agc::process(IPAContext &context, [[maybe_unused]] const uint32_t frame,
+ IPAFrameContext &frameContext,
const ipu3_uapi_stats_3a *stats)
{
/*
diff --git a/src/ipa/ipu3/algorithms/agc.h b/src/ipa/ipu3/algorithms/agc.h
index 105ae0f2..59b4b984 100644
--- a/src/ipa/ipu3/algorithms/agc.h
+++ b/src/ipa/ipu3/algorithms/agc.h
@@ -28,14 +28,15 @@ public:
~Agc() = default;
int configure(IPAContext &context, const IPAConfigInfo &configInfo) override;
- void process(IPAContext &context, IPAFrameContext *frameContext,
+ void process(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
const ipu3_uapi_stats_3a *stats) override;
private:
double measureBrightness(const ipu3_uapi_stats_3a *stats,
const ipu3_uapi_grid_config &grid) const;
utils::Duration filterExposure(utils::Duration currentExposure);
- void computeExposure(IPAContext &context, IPAFrameContext *frameContext,
+ void computeExposure(IPAContext &context, IPAFrameContext &frameContext,
double yGain, double iqMeanGain);
double estimateLuminance(IPAActiveState &activeState,
const ipu3_uapi_grid_config &grid,
diff --git a/src/ipa/ipu3/algorithms/awb.cpp b/src/ipa/ipu3/algorithms/awb.cpp
index b658ee54..0dbd7d4c 100644
--- a/src/ipa/ipu3/algorithms/awb.cpp
+++ b/src/ipa/ipu3/algorithms/awb.cpp
@@ -387,7 +387,8 @@ void Awb::calculateWBGains(const ipu3_uapi_stats_3a *stats)
/**
* \copydoc libcamera::ipa::Algorithm::process
*/
-void Awb::process(IPAContext &context, [[maybe_unused]] IPAFrameContext *frameContext,
+void Awb::process(IPAContext &context, [[maybe_unused]] const uint32_t frame,
+ [[maybe_unused]] IPAFrameContext &frameContext,
const ipu3_uapi_stats_3a *stats)
{
calculateWBGains(stats);
@@ -429,7 +430,10 @@ constexpr uint16_t Awb::gainValue(double gain)
/**
* \copydoc libcamera::ipa::Algorithm::prepare
*/
-void Awb::prepare(IPAContext &context, ipu3_uapi_params *params)
+void Awb::prepare(IPAContext &context,
+ [[maybe_unused]] const uint32_t frame,
+ [[maybe_unused]] IPAFrameContext &frameContext,
+ ipu3_uapi_params *params)
{
/*
* Green saturation thresholds are reduced because we are using the
diff --git a/src/ipa/ipu3/algorithms/awb.h b/src/ipa/ipu3/algorithms/awb.h
index 0acd2148..28e2d38a 100644
--- a/src/ipa/ipu3/algorithms/awb.h
+++ b/src/ipa/ipu3/algorithms/awb.h
@@ -39,8 +39,11 @@ public:
~Awb();
int configure(IPAContext &context, const IPAConfigInfo &configInfo) override;
- void prepare(IPAContext &context, ipu3_uapi_params *params) override;
- void process(IPAContext &context, IPAFrameContext *frameContext,
+ void prepare(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
+ ipu3_uapi_params *params) override;
+ void process(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
const ipu3_uapi_stats_3a *stats) override;
private:
diff --git a/src/ipa/ipu3/algorithms/blc.cpp b/src/ipa/ipu3/algorithms/blc.cpp
index c561aa85..e838072a 100644
--- a/src/ipa/ipu3/algorithms/blc.cpp
+++ b/src/ipa/ipu3/algorithms/blc.cpp
@@ -38,14 +38,18 @@ BlackLevelCorrection::BlackLevelCorrection()
/**
* \brief Fill in the parameter structure, and enable black level correction
- * \param context The shared IPA context
- * \param params The IPU3 parameters
+ * \param[in] context The shared IPA context
+ * \param[in] frame The frame context sequence number
+ * \param[in] frameContext The FrameContext for this frame
+ * \param[out] params The IPU3 parameters
*
* Populate the IPU3 parameter structure with the correction values for each
* channel and enable the corresponding ImgU block processing.
*/
void BlackLevelCorrection::prepare([[maybe_unused]] IPAContext &context,
- ipu3_uapi_params *params)
+ [[maybe_unused]] const uint32_t frame,
+ [[maybe_unused]] IPAFrameContext &frameContext,
+ ipu3_uapi_params *params)
{
/*
* The Optical Black Level correction values
diff --git a/src/ipa/ipu3/algorithms/blc.h b/src/ipa/ipu3/algorithms/blc.h
index d8da1748..292bf67b 100644
--- a/src/ipa/ipu3/algorithms/blc.h
+++ b/src/ipa/ipu3/algorithms/blc.h
@@ -18,7 +18,9 @@ class BlackLevelCorrection : public Algorithm
public:
BlackLevelCorrection();
- void prepare(IPAContext &context, ipu3_uapi_params *params) override;
+ void prepare(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
+ ipu3_uapi_params *params) override;
};
} /* namespace ipa::ipu3::algorithms */
diff --git a/src/ipa/ipu3/algorithms/tone_mapping.cpp b/src/ipa/ipu3/algorithms/tone_mapping.cpp
index 49a5558b..eac3d406 100644
--- a/src/ipa/ipu3/algorithms/tone_mapping.cpp
+++ b/src/ipa/ipu3/algorithms/tone_mapping.cpp
@@ -49,13 +49,17 @@ int ToneMapping::configure(IPAContext &context,
/**
* \brief Fill in the parameter structure, and enable gamma control
- * \param context The shared IPA context
- * \param params The IPU3 parameters
+ * \param[in] context The shared IPA context
+ * \param[in] frame The frame context sequence number
+ * \param[in] frameContext The FrameContext for this frame
+ * \param[out] params The IPU3 parameters
*
* Populate the IPU3 parameter structure with our tone mapping look up table and
* enable the gamma control module in the processing blocks.
*/
void ToneMapping::prepare([[maybe_unused]] IPAContext &context,
+ [[maybe_unused]] const uint32_t frame,
+ [[maybe_unused]] IPAFrameContext &frameContext,
ipu3_uapi_params *params)
{
/* Copy the calculated LUT into the parameters buffer. */
@@ -71,14 +75,16 @@ void ToneMapping::prepare([[maybe_unused]] IPAContext &context,
/**
* \brief Calculate the tone mapping look up table
- * \param context The shared IPA context
- * \param frameContext The current frame context
- * \param stats The IPU3 statistics and ISP results
+ * \param[in] context The shared IPA context
+ * \param[in] frame The current frame sequence number
+ * \param[in] frameContext The current frame context
+ * \param[in] stats The IPU3 statistics and ISP results
*
* The tone mapping look up table is generated as an inverse power curve from
* our gamma setting.
*/
-void ToneMapping::process(IPAContext &context, [[maybe_unused]] IPAFrameContext *frameContext,
+void ToneMapping::process(IPAContext &context, [[maybe_unused]] const uint32_t frame,
+ [[maybe_unused]] IPAFrameContext &frameContext,
[[maybe_unused]] const ipu3_uapi_stats_3a *stats)
{
/*
diff --git a/src/ipa/ipu3/algorithms/tone_mapping.h b/src/ipa/ipu3/algorithms/tone_mapping.h
index d7d48006..822e5168 100644
--- a/src/ipa/ipu3/algorithms/tone_mapping.h
+++ b/src/ipa/ipu3/algorithms/tone_mapping.h
@@ -19,8 +19,10 @@ public:
ToneMapping();
int configure(IPAContext &context, const IPAConfigInfo &configInfo) override;
- void prepare(IPAContext &context, ipu3_uapi_params *params) override;
- void process(IPAContext &context, IPAFrameContext *frameContext,
+ void prepare(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext, ipu3_uapi_params *params) override;
+ void process(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
const ipu3_uapi_stats_3a *stats) override;
private:
diff --git a/src/ipa/ipu3/ipa_context.cpp b/src/ipa/ipu3/ipa_context.cpp
index 13cdb835..bd71b615 100644
--- a/src/ipa/ipu3/ipa_context.cpp
+++ b/src/ipa/ipu3/ipa_context.cpp
@@ -36,22 +36,6 @@ namespace libcamera::ipa::ipu3 {
*/
/**
- * \struct IPAFrameContext
- * \brief Context for a frame
- *
- * The frame context stores data specific to a single frame processed by the
- * IPA. Each frame processed by the IPA has a context associated with it,
- * accessible through the IPAContext structure.
- *
- * Fields in the frame context should reflect values and controls
- * associated with the specific frame as requested by the application, and
- * as configured by the hardware. Fields can be read by algorithms to
- * determine if they should update any specific action for this frame, and
- * finally to update the metadata control lists when the frame is fully
- * completed.
- */
-
-/**
* \struct IPAContext
* \brief Global IPA context data shared between all algorithms
*
@@ -181,25 +165,8 @@ namespace libcamera::ipa::ipu3 {
*/
/**
- * \brief Default constructor for IPAFrameContext
- */
-IPAFrameContext::IPAFrameContext() = default;
-
-/**
- * \brief Construct a IPAFrameContext instance
- */
-IPAFrameContext::IPAFrameContext(uint32_t id, const ControlList &reqControls)
- : frame(id), frameControls(reqControls)
-{
- sensor = {};
-}
-
-/**
- * \var IPAFrameContext::frame
- * \brief The frame number
- *
- * \var IPAFrameContext::frameControls
- * \brief Controls sent in by the application while queuing the request
+ * \struct IPAFrameContext
+ * \brief IPU3-specific FrameContext
*
* \var IPAFrameContext::sensor
* \brief Effective sensor values that were applied for the frame
diff --git a/src/ipa/ipu3/ipa_context.h b/src/ipa/ipu3/ipa_context.h
index 42e11141..36099353 100644
--- a/src/ipa/ipu3/ipa_context.h
+++ b/src/ipa/ipu3/ipa_context.h
@@ -8,22 +8,18 @@
#pragma once
-#include <array>
-
#include <linux/intel-ipu3.h>
#include <libcamera/base/utils.h>
-#include <libcamera/controls.h>
#include <libcamera/geometry.h>
+#include <libipa/fc_queue.h>
+
namespace libcamera {
namespace ipa::ipu3 {
-/* Maximum number of frame contexts to be held */
-static constexpr uint32_t kMaxFrameContexts = 16;
-
struct IPASessionConfiguration {
struct {
ipu3_uapi_grid_config bdsGrid;
@@ -76,24 +72,18 @@ struct IPAActiveState {
} toneMapping;
};
-struct IPAFrameContext {
- IPAFrameContext();
- IPAFrameContext(uint32_t id, const ControlList &reqControls);
-
+struct IPAFrameContext : public FrameContext {
struct {
uint32_t exposure;
double gain;
} sensor;
-
- uint32_t frame;
- ControlList frameControls;
};
struct IPAContext {
IPASessionConfiguration configuration;
IPAActiveState activeState;
- std::array<IPAFrameContext, kMaxFrameContexts> frameContexts;
+ FCQueue<IPAFrameContext> frameContexts;
};
} /* namespace ipa::ipu3 */
diff --git a/src/ipa/ipu3/ipu3.cpp b/src/ipa/ipu3/ipu3.cpp
index e37b2fa0..d1ea081d 100644
--- a/src/ipa/ipu3/ipu3.cpp
+++ b/src/ipa/ipu3/ipu3.cpp
@@ -40,6 +40,8 @@
#include "algorithms/tone_mapping.h"
#include "libipa/camera_sensor_helper.h"
+#include "ipa_context.h"
+
/* Minimum grid width, expressed as a number of cells */
static constexpr uint32_t kMinGridWidth = 16;
/* Maximum grid width, expressed as a number of cells */
@@ -53,6 +55,9 @@ static constexpr uint32_t kMinCellSizeLog2 = 3;
/* log2 of the maximum grid cell width and height, in pixels */
static constexpr uint32_t kMaxCellSizeLog2 = 6;
+/* Maximum number of frame contexts to be held */
+static constexpr uint32_t kMaxFrameContexts = 16;
+
namespace libcamera {
LOG_DEFINE_CATEGORY(IPAIPU3)
@@ -135,6 +140,8 @@ namespace ipa::ipu3 {
class IPAIPU3 : public IPAIPU3Interface, public Module
{
public:
+ IPAIPU3();
+
int init(const IPASettings &settings,
const IPACameraSensorInfo &sensorInfo,
const ControlInfoMap &sensorControls,
@@ -183,6 +190,11 @@ private:
struct IPAContext context_;
};
+IPAIPU3::IPAIPU3()
+ : context_({ {}, {}, { kMaxFrameContexts } })
+{
+}
+
std::string IPAIPU3::logPrefix() const
{
return "ipu3";
@@ -205,6 +217,11 @@ void IPAIPU3::updateSessionConfiguration(const ControlInfoMap &sensorControls)
int32_t minGain = v4l2Gain.min().get<int32_t>();
int32_t maxGain = v4l2Gain.max().get<int32_t>();
+ /* Clear the IPA context before the streaming session. */
+ context_.configuration = {};
+ context_.activeState = {};
+ context_.frameContexts.clear();
+
/*
* When the AGC computes the new exposure values for a frame, it needs
* to know the limits for shutter speed and analogue gain.
@@ -327,7 +344,7 @@ int IPAIPU3::init(const IPASettings &settings,
context_.configuration.sensor.lineDuration = sensorInfo.lineLength * 1.0s / sensorInfo.pixelRate;
/* Load the tuning data file. */
- File file(settings.configurationFile.c_str());
+ File file(settings.configurationFile);
if (!file.open(File::OpenModeFlag::ReadOnly)) {
int ret = file.error();
LOG(IPAIPU3, Error)
@@ -382,6 +399,7 @@ int IPAIPU3::start()
*/
void IPAIPU3::stop()
{
+ context_.frameContexts.clear();
}
/**
@@ -488,11 +506,6 @@ int IPAIPU3::configure(const IPAConfigInfo &configInfo,
calculateBdsGrid(configInfo.bdsOutputSize);
- /* Clean IPAActiveState at each reconfiguration. */
- context_.activeState = {};
- IPAFrameContext initFrameContext;
- context_.frameContexts.fill(initFrameContext);
-
if (!validateSensorControls()) {
LOG(IPAIPU3, Error) << "Sensor control validation failed.";
return -EINVAL;
@@ -572,8 +585,10 @@ void IPAIPU3::fillParamsBuffer(const uint32_t frame, const uint32_t bufferId)
*/
params->use = {};
+ IPAFrameContext &frameContext = context_.frameContexts.get(frame);
+
for (auto const &algo : algorithms())
- algo->prepare(context_, params);
+ algo->prepare(context_, frame, frameContext, params);
paramsBufferReady.emit(frame);
}
@@ -603,10 +618,7 @@ void IPAIPU3::processStatsBuffer(const uint32_t frame,
const ipu3_uapi_stats_3a *stats =
reinterpret_cast<ipu3_uapi_stats_3a *>(mem.data());
- IPAFrameContext &frameContext = context_.frameContexts[frame % kMaxFrameContexts];
-
- if (frameContext.frame != frame)
- LOG(IPAIPU3, Warning) << "Frame " << frame << " does not match its frame context";
+ IPAFrameContext &frameContext = context_.frameContexts.get(frame);
frameContext.sensor.exposure = sensorControls.get(V4L2_CID_EXPOSURE).get<int32_t>();
frameContext.sensor.gain = camHelper_->gain(sensorControls.get(V4L2_CID_ANALOGUE_GAIN).get<int32_t>());
@@ -616,7 +628,7 @@ void IPAIPU3::processStatsBuffer(const uint32_t frame,
ControlList ctrls(controls::controls);
for (auto const &algo : algorithms())
- algo->process(context_, &frameContext, stats);
+ algo->process(context_, frame, frameContext, stats);
setControls(frame);
@@ -651,8 +663,10 @@ void IPAIPU3::processStatsBuffer(const uint32_t frame,
*/
void IPAIPU3::queueRequest(const uint32_t frame, const ControlList &controls)
{
- /* \todo Start processing for 'frame' based on 'controls'. */
- context_.frameContexts[frame % kMaxFrameContexts] = { frame, controls };
+ IPAFrameContext &frameContext = context_.frameContexts.alloc(frame);
+
+ for (auto const &algo : algorithms())
+ algo->queueRequest(context_, frame, frameContext, controls);
}
/**
diff --git a/src/ipa/libipa/algorithm.cpp b/src/ipa/libipa/algorithm.cpp
index 38200e57..c152b885 100644
--- a/src/ipa/libipa/algorithm.cpp
+++ b/src/ipa/libipa/algorithm.cpp
@@ -70,6 +70,8 @@ namespace ipa {
* \fn Algorithm::prepare()
* \brief Fill the \a params buffer with ISP processing parameters for a frame
* \param[in] context The shared IPA context
+ * \param[in] frame The frame context sequence number
+ * \param[in] frameContext The FrameContext for this frame
* \param[out] params The ISP specific parameters.
*
* This function is called for every frame when the camera is running before it
@@ -86,6 +88,7 @@ namespace ipa {
* \brief Provide control values to the algorithm
* \param[in] context The shared IPA context
* \param[in] frame The frame number to apply the control values
+ * \param[in] frameContext The current frame's context
* \param[in] controls The list of user controls
*
* This function is called for each request queued to the camera. It provides
@@ -101,6 +104,7 @@ namespace ipa {
* \fn Algorithm::process()
* \brief Process ISP statistics, and run algorithm operations
* \param[in] context The shared IPA context
+ * \param[in] frame The frame context sequence number
* \param[in] frameContext The current frame's context
* \param[in] stats The IPA statistics and ISP results
*
diff --git a/src/ipa/libipa/algorithm.h b/src/ipa/libipa/algorithm.h
index ccc659a6..d8601f9c 100644
--- a/src/ipa/libipa/algorithm.h
+++ b/src/ipa/libipa/algorithm.h
@@ -7,6 +7,7 @@
#pragma once
#include <memory>
+#include <stdint.h>
#include <string>
#include <libcamera/controls.h>
@@ -38,18 +39,22 @@ public:
}
virtual void prepare([[maybe_unused]] typename Module::Context &context,
+ [[maybe_unused]] const uint32_t frame,
+ [[maybe_unused]] typename Module::FrameContext &frameContext,
[[maybe_unused]] typename Module::Params *params)
{
}
virtual void queueRequest([[maybe_unused]] typename Module::Context &context,
[[maybe_unused]] const uint32_t frame,
+ [[maybe_unused]] typename Module::FrameContext &frameContext,
[[maybe_unused]] const ControlList &controls)
{
}
virtual void process([[maybe_unused]] typename Module::Context &context,
- [[maybe_unused]] typename Module::FrameContext *frameContext,
+ [[maybe_unused]] const uint32_t frame,
+ [[maybe_unused]] typename Module::FrameContext &frameContext,
[[maybe_unused]] const typename Module::Stats *stats)
{
}
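Taken together, every algorithm hook now receives the frame number and a
per-frame context. A sketch of a module-specific algorithm implementing the
new signatures (MyModule and its member types are illustrative; the real
instantiations are the IPU3 and RkISP1 algorithms updated in this series):

    class MyAlgorithm : public libcamera::ipa::Algorithm<MyModule>
    {
    public:
            void queueRequest(MyModule::Context &context, const uint32_t frame,
                              MyModule::FrameContext &frameContext,
                              const libcamera::ControlList &controls) override
            {
                    /* Record per-request controls in the frame context. */
            }

            void prepare(MyModule::Context &context, const uint32_t frame,
                         MyModule::FrameContext &frameContext,
                         MyModule::Params *params) override
            {
                    /* Fill the ISP parameters for this frame. */
            }

            void process(MyModule::Context &context, const uint32_t frame,
                         MyModule::FrameContext &frameContext,
                         const MyModule::Stats *stats) override
            {
                    /* Consume the statistics computed for this frame. */
            }
    };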
diff --git a/src/ipa/libipa/fc_queue.cpp b/src/ipa/libipa/fc_queue.cpp
new file mode 100644
index 00000000..e812faa5
--- /dev/null
+++ b/src/ipa/libipa/fc_queue.cpp
@@ -0,0 +1,140 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2022, Google Inc.
+ *
+ * fc_queue.cpp - IPA Frame context queue
+ */
+
+#include "fc_queue.h"
+
+#include <libcamera/base/log.h>
+
+namespace libcamera {
+
+LOG_DEFINE_CATEGORY(FCQueue)
+
+namespace ipa {
+
+/**
+ * \file fc_queue.h
+ * \brief Queue of per-frame contexts
+ */
+
+/**
+ * \struct FrameContext
+ * \brief Context for a frame
+ *
+ * The frame context stores data specific to a single frame processed by the
+ * IPA module. Each frame processed by the IPA module has a context associated
+ * with it, accessible through the Frame Context Queue.
+ *
+ * Fields in the frame context should reflect values and controls associated
+ * with the specific frame as requested by the application, and as configured by
+ * the hardware. Fields can be read by algorithms to determine if they should
+ * update any specific action for this frame, and finally to update the metadata
+ * control lists when the frame is fully completed.
+ *
+ * \var FrameContext::frame
+ * \brief The frame number
+ */
+
+/**
+ * \class FCQueue
+ * \brief A support class for managing FrameContext instances in IPA modules
+ * \tparam FrameContext The IPA module-specific FrameContext derived class type
+ *
+ * Along with the Module and Algorithm classes, the frame context queue is a
+ * core component of the libipa infrastructure. It stores per-frame contexts
+ * used by the Algorithm operations. By centralizing the lifetime management of
+ * the contexts and implementing safeguards against underflows and overflows, it
+ * simplifies IPA modules and improves their reliability.
+ *
+ * The queue references frame contexts by a monotonically increasing sequence
+ * number. The FCQueue design assumes that this number matches both the sequence
+ * number of the corresponding frame, as generated by the camera sensor, and the
+ * sequence number of the request. This allows IPA modules to obtain the frame
+ * context from any location where a request or a frame is available.
+ *
+ * A frame context normally begins its lifetime when the corresponding request
+ * is queued, way before the frame is captured by the camera sensor. IPA modules
+ * allocate the context from the queue at that point, calling alloc() using the
+ * request number. The queue initializes the context, and the IPA module then
+ * populates it with data from the request. The context can be later retrieved
+ * with a call to get(), typically when the IPA module is requested to provide
+ * sensor or ISP parameters or receives statistics for a frame. The frame number
+ * is used at that point to identify the context.
+ *
+ * If an application fails to queue requests to the camera fast enough, frames
+ * may be produced by the camera sensor and processed by the IPA module without
+ * a corresponding request having been queued to the IPA module. This creates an
+ * underrun condition, where the IPA module will try to get a frame context that
+ * hasn't been allocated. In this case, the get() function will allocate and
+ * initialize a context for the frame, and log a message. Algorithms will not
+ * initialise a context for the frame, and log a message. Algorithms will not
+ * behave correctly.
+ *
+ * \todo Mark the frame context with a per-frame control error flag in case of
+ * underrun, and research how algorithms should handle this.
+ *
+ * At its core, the queue uses a circular buffer to avoid dynamic memory
+ * allocation at runtime. The buffer is pre-allocated with a maximum number of
+ * entries when the FCQueue instance is constructed. Entries are initialized on
+ * entries when the FCQueue instance is constructed. Entries are initialised on
+ * allowed to overflow, which must be ensured by pipeline handlers never
+ * queuing more in-flight requests to the IPA module than the queue size. If an
+ * overflow condition is detected, the queue will log a fatal error.
+ *
+ * IPA module-specific frame context implementations shall inherit from the
+ * FrameContext base class to support the minimum required features for a
+ * FrameContext.
+ */
+
+/**
+ * \fn FCQueue::FCQueue(unsigned int size)
+ * \brief Construct a frame contexts queue of a specified size
+ * \param[in] size The number of contexts in the queue
+ */
+
+/**
+ * \fn FCQueue::clear()
+ * \brief Clear the contexts queue
+ *
+ * IPA modules must clear the frame context queue at the beginning of a new
+ * streaming session, in IPAModule::start().
+ *
+ * \todo Fix any issue this may cause with requests queued before the camera is
+ * started.
+ */
+
+/**
+ * \fn FCQueue::alloc(uint32_t frame)
+ * \brief Allocate and return a FrameContext for the \a frame
+ * \param[in] frame The frame context sequence number
+ *
+ * The first call to obtain a FrameContext from the FCQueue should be handled
+ * through this function. The FrameContext will be initialised, if not
+ * initialised already, and returned to the caller.
+ *
+ * If the FrameContext was already initialised for this \a frame, a warning will
+ * be reported and the previously initialised FrameContext is returned.
+ *
+ * Frame contexts are expected to be initialised when a Request is first passed
+ * to the IPA module in IPAModule::queueRequest().
+ *
+ * \return A reference to the FrameContext for sequence \a frame
+ */
+
+/**
+ * \fn FCQueue::get(uint32_t frame)
+ * \brief Obtain the FrameContext for the \a frame
+ * \param[in] frame The frame context sequence number
+ *
+ * If the FrameContext is not correctly initialised for the \a frame, it will be
+ * initialised.
+ *
+ * \return A reference to the FrameContext for sequence \a frame
+ */
+
+} /* namespace ipa */
+
+} /* namespace libcamera */
diff --git a/src/ipa/libipa/fc_queue.h b/src/ipa/libipa/fc_queue.h
new file mode 100644
index 00000000..a589e7e1
--- /dev/null
+++ b/src/ipa/libipa/fc_queue.h
@@ -0,0 +1,118 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2022, Google Inc.
+ *
+ * fc_queue.h - IPA Frame context queue
+ */
+
+#pragma once
+
+#include <stdint.h>
+#include <vector>
+
+#include <libcamera/base/log.h>
+
+namespace libcamera {
+
+LOG_DECLARE_CATEGORY(FCQueue)
+
+namespace ipa {
+
+template<typename FrameContext>
+class FCQueue;
+
+struct FrameContext {
+private:
+ template<typename T> friend class FCQueue;
+ uint32_t frame;
+};
+
+template<typename FrameContext>
+class FCQueue
+{
+public:
+ FCQueue(unsigned int size)
+ : contexts_(size)
+ {
+ }
+
+ void clear()
+ {
+ for (FrameContext &ctx : contexts_)
+ ctx.frame = 0;
+ }
+
+ FrameContext &alloc(const uint32_t frame)
+ {
+ FrameContext &frameContext = contexts_[frame % contexts_.size()];
+
+ /*
+ * Do not re-initialise if a get() call has already fetched this
+		 * frame context, to preserve the context.
+ *
+		 * \todo If the sequence number of the context to initialise
+ * is smaller than the sequence number of the queue slot to use,
+ * it means that we had a serious request underrun and more
+		 * frames than the queue size have been produced since the last
+		 * time the application queued a request. Does this deserve
+ * an error condition ?
+ */
+ if (frame != 0 && frame <= frameContext.frame)
+ LOG(FCQueue, Warning)
+ << "Frame " << frame << " already initialised";
+ else
+ init(frameContext, frame);
+
+ return frameContext;
+ }
+
+ FrameContext &get(uint32_t frame)
+ {
+ FrameContext &frameContext = contexts_[frame % contexts_.size()];
+
+ /*
+ * If the IPA algorithms try to access a frame context slot which
+		 * has already been overwritten by a newer context, it means the
+ * frame context queue has overflowed and the desired context
+ * has been forever lost. The pipeline handler shall avoid
+ * queueing more requests to the IPA than the frame context
+ * queue size.
+ */
+ if (frame < frameContext.frame)
+ LOG(FCQueue, Fatal) << "Frame context for " << frame
+ << " has been overwritten by "
+ << frameContext.frame;
+
+ if (frame == frameContext.frame)
+ return frameContext;
+
+ /*
+ * The frame context has been retrieved before it was
+		 * initialised through the alloc() call. This indicates an
+		 * algorithm attempted to access a frame context before it was
+ * queued to the IPA. Controls applied for this request may be
+ * left unhandled.
+ *
+ * \todo Set an error flag for per-frame control errors.
+ */
+ LOG(FCQueue, Warning)
+ << "Obtained an uninitialised FrameContext for " << frame;
+
+ init(frameContext, frame);
+
+ return frameContext;
+ }
+
+private:
+ void init(FrameContext &frameContext, const uint32_t frame)
+ {
+ frameContext = {};
+ frameContext.frame = frame;
+ }
+
+ std::vector<FrameContext> contexts_;
+};
+
+} /* namespace ipa */
+
+} /* namespace libcamera */
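A condensed usage sketch of the lifecycle documented in fc_queue.cpp above
(the module, queue size and field names are illustrative; compare with the
IPU3 conversion below):

    #include <stdint.h>

    #include "fc_queue.h"

    struct MyFrameContext : public libcamera::ipa::FrameContext {
            uint32_t exposure; /* effective sensor exposure for this frame */
    };

    class MyIPA
    {
    public:
            MyIPA()
                    : frameContexts_(16) /* at most 16 in-flight requests */
            {
            }

            /* Clear the queue at the start of each streaming session. */
            void start() { frameContexts_.clear(); }

            void queueRequest(uint32_t frame)
            {
                    /* First touch: alloc() initialises the slot for this frame. */
                    MyFrameContext &ctx = frameContexts_.alloc(frame);
                    ctx.exposure = 100;
            }

            void processStats(uint32_t frame)
            {
                    /* Later touches: get() returns the same context. */
                    MyFrameContext &ctx = frameContexts_.get(frame);
                    static_cast<void>(ctx.exposure);
            }

    private:
            libcamera::ipa::FCQueue<MyFrameContext> frameContexts_;
    };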
diff --git a/src/ipa/libipa/meson.build b/src/ipa/libipa/meson.build
index fb894bc6..016b8e0e 100644
--- a/src/ipa/libipa/meson.build
+++ b/src/ipa/libipa/meson.build
@@ -3,6 +3,7 @@
libipa_headers = files([
'algorithm.h',
'camera_sensor_helper.h',
+ 'fc_queue.h',
'histogram.h',
'module.h',
])
@@ -10,6 +11,7 @@ libipa_headers = files([
libipa_sources = files([
'algorithm.cpp',
'camera_sensor_helper.cpp',
+ 'fc_queue.cpp',
'histogram.cpp',
'module.cpp',
])
diff --git a/src/ipa/meson.build b/src/ipa/meson.build
index 849bb372..76ad5b44 100644
--- a/src/ipa/meson.build
+++ b/src/ipa/meson.build
@@ -27,6 +27,14 @@ ipa_sign = files('ipa-sign.sh')
ipa_names = []
ipa_modules = get_option('ipas')
+
+# Tests require the vimc IPA, like the vimc pipeline handler, for their
+# execution. Include it automatically when tests are enabled.
+if get_option('test') and 'vimc' not in ipa_modules
+ message('Enabling vimc IPA to support tests')
+ ipa_modules += ['vimc']
+endif
+
enabled_ipa_modules = []
# The ipa-sign-install.sh script which uses the ipa_names variable will itself
diff --git a/src/ipa/raspberrypi/raspberrypi.cpp b/src/ipa/raspberrypi/raspberrypi.cpp
index 69c73f8c..8d731435 100644
--- a/src/ipa/raspberrypi/raspberrypi.cpp
+++ b/src/ipa/raspberrypi/raspberrypi.cpp
@@ -49,7 +49,6 @@
#include "geq_status.h"
#include "lux_status.h"
#include "metadata.h"
-#include "noise_status.h"
#include "sharpen_algorithm.h"
#include "sharpen_status.h"
diff --git a/src/ipa/rkisp1/algorithms/agc.cpp b/src/ipa/rkisp1/algorithms/agc.cpp
index a1bb7d97..04062a36 100644
--- a/src/ipa/rkisp1/algorithms/agc.cpp
+++ b/src/ipa/rkisp1/algorithms/agc.cpp
@@ -73,8 +73,8 @@ Agc::Agc()
int Agc::configure(IPAContext &context, const IPACameraSensorInfo &configInfo)
{
/* Configure the default exposure and gain. */
- context.frameContext.agc.gain = std::max(context.configuration.agc.minAnalogueGain, kMinAnalogueGain);
- context.frameContext.agc.exposure = 10ms / context.configuration.sensor.lineDuration;
+ context.activeState.agc.gain = std::max(context.configuration.agc.minAnalogueGain, kMinAnalogueGain);
+ context.activeState.agc.exposure = 10ms / context.configuration.sensor.lineDuration;
/*
* According to the RkISP1 documentation:
@@ -98,7 +98,10 @@ int Agc::configure(IPAContext &context, const IPACameraSensorInfo &configInfo)
context.configuration.agc.measureWindow.h_size = 3 * configInfo.outputSize.width / 4;
context.configuration.agc.measureWindow.v_size = 3 * configInfo.outputSize.height / 4;
- /* \todo Use actual frame index by populating it in the frameContext. */
+ /*
+ * \todo Use the upcoming per-frame context API that will provide a
+ * frame index.
+ */
frameCount_ = 0;
return 0;
}
@@ -140,14 +143,16 @@ utils::Duration Agc::filterExposure(utils::Duration exposureValue)
/**
* \brief Estimate the new exposure and gain values
- * \param[inout] frameContext The shared IPA frame Context
+ * \param[inout] context The shared IPA Context
+ * \param[in] frameContext The FrameContext for this frame
* \param[in] yGain The gain calculated on the current brightness level
* \param[in] iqMeanGain The gain calculated based on the relative luminance target
*/
-void Agc::computeExposure(IPAContext &context, double yGain, double iqMeanGain)
+void Agc::computeExposure(IPAContext &context, IPAFrameContext &frameContext,
+ double yGain, double iqMeanGain)
{
IPASessionConfiguration &configuration = context.configuration;
- IPAFrameContext &frameContext = context.frameContext;
+ IPAActiveState &activeState = context.activeState;
/* Get the effective exposure and gain applied on the sensor. */
uint32_t exposure = frameContext.sensor.exposure;
@@ -216,8 +221,8 @@ void Agc::computeExposure(IPAContext &context, double yGain, double iqMeanGain)
<< stepGain;
/* Update the estimated exposure and gain. */
- frameContext.agc.exposure = shutterTime / configuration.sensor.lineDuration;
- frameContext.agc.gain = stepGain;
+ activeState.agc.exposure = shutterTime / configuration.sensor.lineDuration;
+ activeState.agc.gain = stepGain;
}
/**
@@ -275,15 +280,24 @@ double Agc::measureBrightness(const rkisp1_cif_isp_hist_stat *hist) const
/**
* \brief Process RkISP1 statistics, and run AGC operations
* \param[in] context The shared IPA context
+ * \param[in] frame The frame context sequence number
+ * \param[in] frameContext The current frame context
* \param[in] stats The RKISP1 statistics and ISP results
*
* Identify the current image brightness, and use that to estimate the optimal
* new exposure and gain for the scene.
*/
-void Agc::process(IPAContext &context,
- [[maybe_unused]] IPAFrameContext *frameContext,
- const rkisp1_stat_buffer *stats)
+void Agc::process(IPAContext &context, [[maybe_unused]] const uint32_t frame,
+ IPAFrameContext &frameContext, const rkisp1_stat_buffer *stats)
{
+ /*
+ * \todo Verify that the exposure and gain applied by the sensor for
+ * this frame match what has been requested. This isn't a hard
+ * requirement for stability of the AGC (the guarantee we need in
+ * automatic mode is a perfect match between the frame and the values
+ * we receive), but is important in manual mode.
+ */
+
const rkisp1_cif_isp_stat *params = &stats->params;
ASSERT(stats->meas_type & RKISP1_CIF_ISP_STAT_AUTOEXP);
@@ -315,16 +329,20 @@ void Agc::process(IPAContext &context,
break;
}
- computeExposure(context, yGain, iqMeanGain);
+ computeExposure(context, frameContext, yGain, iqMeanGain);
frameCount_++;
}
/**
* \copydoc libcamera::ipa::Algorithm::prepare
*/
-void Agc::prepare(IPAContext &context, rkisp1_params_cfg *params)
+void Agc::prepare(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext, rkisp1_params_cfg *params)
{
- if (context.frameContext.frameCount > 0)
+ frameContext.agc.exposure = context.activeState.agc.exposure;
+ frameContext.agc.gain = context.activeState.agc.gain;
+
+ if (frame > 0)
return;
/* Configure the measurement window. */
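
For reference, the exposure stored in the active state above is expressed as a number of sensor lines, obtained by dividing the shutter time by the line duration. A small sketch of that conversion follows; the line duration value is hypothetical, the real one comes from the session configuration:

    #include <chrono>
    #include <cstdint>

    #include <libcamera/base/utils.h>

    using namespace std::literals::chrono_literals;

    void exposureToLines()
    {
    	/* Hypothetical sensor timing for illustration only. */
    	libcamera::utils::Duration lineDuration = 30us;

    	/* 10ms / 30us yields 333 lines (the ratio of two durations). */
    	uint32_t exposure = 10ms / lineDuration;
    	static_cast<void>(exposure);
    }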
diff --git a/src/ipa/rkisp1/algorithms/agc.h b/src/ipa/rkisp1/algorithms/agc.h
index 1c9818b7..9ad5c32f 100644
--- a/src/ipa/rkisp1/algorithms/agc.h
+++ b/src/ipa/rkisp1/algorithms/agc.h
@@ -26,12 +26,16 @@ public:
~Agc() = default;
int configure(IPAContext &context, const IPACameraSensorInfo &configInfo) override;
- void prepare(IPAContext &context, rkisp1_params_cfg *params) override;
- void process(IPAContext &context, IPAFrameContext *frameContext,
+ void prepare(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
+ rkisp1_params_cfg *params) override;
+ void process(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
const rkisp1_stat_buffer *stats) override;
private:
- void computeExposure(IPAContext &Context, double yGain, double iqMeanGain);
+ void computeExposure(IPAContext &Context, IPAFrameContext &frameContext,
+ double yGain, double iqMeanGain);
utils::Duration filterExposure(utils::Duration exposureValue);
double estimateLuminance(const rkisp1_cif_isp_ae_stat *ae, double gain);
double measureBrightness(const rkisp1_cif_isp_hist_stat *hist) const;
diff --git a/src/ipa/rkisp1/algorithms/awb.cpp b/src/ipa/rkisp1/algorithms/awb.cpp
index 9f00364d..3349948a 100644
--- a/src/ipa/rkisp1/algorithms/awb.cpp
+++ b/src/ipa/rkisp1/algorithms/awb.cpp
@@ -9,9 +9,11 @@
#include <algorithm>
#include <cmath>
+#include <iomanip>
#include <libcamera/base/log.h>
+#include <libcamera/control_ids.h>
#include <libcamera/ipa/core_ipa_interface.h>
/**
@@ -29,15 +31,27 @@ namespace ipa::rkisp1::algorithms {
LOG_DEFINE_CATEGORY(RkISP1Awb)
+/* Minimum mean value below which AWB can't operate. */
+constexpr double kMeanMinThreshold = 2.0;
+
+Awb::Awb()
+ : rgbMode_(false)
+{
+}
+
/**
* \copydoc libcamera::ipa::Algorithm::configure
*/
int Awb::configure(IPAContext &context,
const IPACameraSensorInfo &configInfo)
{
- context.frameContext.awb.gains.red = 1.0;
- context.frameContext.awb.gains.blue = 1.0;
- context.frameContext.awb.gains.green = 1.0;
+ context.activeState.awb.gains.manual.red = 1.0;
+ context.activeState.awb.gains.manual.blue = 1.0;
+ context.activeState.awb.gains.manual.green = 1.0;
+ context.activeState.awb.gains.automatic.red = 1.0;
+ context.activeState.awb.gains.automatic.blue = 1.0;
+ context.activeState.awb.gains.automatic.green = 1.0;
+ context.activeState.awb.autoEnabled = true;
/*
* Define the measurement window for AWB as a centered rectangle
@@ -48,6 +62,8 @@ int Awb::configure(IPAContext &context,
context.configuration.awb.measureWindow.h_size = 3 * configInfo.outputSize.width / 4;
context.configuration.awb.measureWindow.v_size = 3 * configInfo.outputSize.height / 4;
+ context.configuration.awb.enabled = true;
+
return 0;
}
@@ -70,109 +86,232 @@ uint32_t Awb::estimateCCT(double red, double green, double blue)
/**
* \copydoc libcamera::ipa::Algorithm::prepare
*/
-void Awb::prepare(IPAContext &context, rkisp1_params_cfg *params)
+void Awb::prepare(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext, rkisp1_params_cfg *params)
{
- params->others.awb_gain_config.gain_green_b = 256 * context.frameContext.awb.gains.green;
- params->others.awb_gain_config.gain_blue = 256 * context.frameContext.awb.gains.blue;
- params->others.awb_gain_config.gain_red = 256 * context.frameContext.awb.gains.red;
- params->others.awb_gain_config.gain_green_r = 256 * context.frameContext.awb.gains.green;
+ /*
+ * This is the latest point at which the active state can be read, and
+ * thus provides the most up-to-date automatic values for this frame.
+ */
+ if (frameContext.awb.autoEnabled) {
+ frameContext.awb.gains.red = context.activeState.awb.gains.automatic.red;
+ frameContext.awb.gains.green = context.activeState.awb.gains.automatic.green;
+ frameContext.awb.gains.blue = context.activeState.awb.gains.automatic.blue;
+ }
+
+ params->others.awb_gain_config.gain_green_b = 256 * frameContext.awb.gains.green;
+ params->others.awb_gain_config.gain_blue = 256 * frameContext.awb.gains.blue;
+ params->others.awb_gain_config.gain_red = 256 * frameContext.awb.gains.red;
+ params->others.awb_gain_config.gain_green_r = 256 * frameContext.awb.gains.green;
/* Update the gains. */
params->module_cfg_update |= RKISP1_CIF_ISP_MODULE_AWB_GAIN;
- /* If we already have configured the gains and window, return. */
- if (context.frameContext.frameCount > 0)
+ /* If we have already set the AWB measurement parameters, return. */
+ if (frame > 0)
return;
- /* Configure the gains to apply. */
+ rkisp1_cif_isp_awb_meas_config &awb_config = params->meas.awb_meas_config;
+
+ /* Configure the measure window for AWB. */
+ awb_config.awb_wnd = context.configuration.awb.measureWindow;
+
+ /* Number of frames to use to estimate the means (0 means 1 frame). */
+ awb_config.frames = 0;
+
+ /* Select RGB or YCbCr means measurement. */
+ if (rgbMode_) {
+ awb_config.awb_mode = RKISP1_CIF_ISP_AWB_MODE_RGB;
+
+ /*
+ * For RGB-based measurements, pixels are selected with maximum
+ * red, green and blue thresholds that are set in the
+ * awb_ref_cr, min_y and awb_ref_cb fields respectively. The other
+ * values are not used, so set them to 0.
+ */
+ awb_config.awb_ref_cr = 250;
+ awb_config.min_y = 250;
+ awb_config.awb_ref_cb = 250;
+
+ awb_config.max_y = 0;
+ awb_config.min_c = 0;
+ awb_config.max_csum = 0;
+ } else {
+ awb_config.awb_mode = RKISP1_CIF_ISP_AWB_MODE_YCBCR;
+
+ /* Set the reference Cr and Cb (AWB target) to white. */
+ awb_config.awb_ref_cb = 128;
+ awb_config.awb_ref_cr = 128;
+
+ /*
+ * Filter out pixels based on luminance and chrominance values.
+ * The acceptable luma values are specified as a [16, 250]
+ * range, while the acceptable chroma values are specified with
+ * a minimum of 16 and a maximum Cb+Cr sum of 250.
+ */
+ awb_config.min_y = 16;
+ awb_config.max_y = 250;
+ awb_config.min_c = 16;
+ awb_config.max_csum = 250;
+ }
+
+ /* Enable the AWB gains. */
params->module_en_update |= RKISP1_CIF_ISP_MODULE_AWB_GAIN;
- /* Update the ISP to apply the gains configured. */
params->module_ens |= RKISP1_CIF_ISP_MODULE_AWB_GAIN;
- /* Configure the measure window for AWB. */
- params->meas.awb_meas_config.awb_wnd = context.configuration.awb.measureWindow;
- /*
- * Measure Y, Cr and Cb means.
- * \todo RGB is not working, the kernel seems to not configure it ?
- */
- params->meas.awb_meas_config.awb_mode = RKISP1_CIF_ISP_AWB_MODE_YCBCR;
- /* Reference Cr and Cb. */
- params->meas.awb_meas_config.awb_ref_cb = 128;
- params->meas.awb_meas_config.awb_ref_cr = 128;
- /* Y values to include are between min_y and max_y only. */
- params->meas.awb_meas_config.min_y = 16;
- params->meas.awb_meas_config.max_y = 250;
- /* Maximum Cr+Cb value to take into account for awb. */
- params->meas.awb_meas_config.max_csum = 250;
- /* Minimum Cr and Cb values to take into account. */
- params->meas.awb_meas_config.min_c = 16;
- /* Number of frames to use to estimate the mean (0 means 1 frame). */
- params->meas.awb_meas_config.frames = 0;
-
- /* Update AWB measurement unit configuration. */
+ /* Update the AWB measurement parameters and enable the AWB module. */
params->module_cfg_update |= RKISP1_CIF_ISP_MODULE_AWB;
- /* Make sure the ISP is measuring the means for the next frame. */
params->module_en_update |= RKISP1_CIF_ISP_MODULE_AWB;
params->module_ens |= RKISP1_CIF_ISP_MODULE_AWB;
}
/**
+ * \copydoc libcamera::ipa::Algorithm::queueRequest
+ */
+void Awb::queueRequest(IPAContext &context,
+ [[maybe_unused]] const uint32_t frame,
+ IPAFrameContext &frameContext,
+ const ControlList &controls)
+{
+ auto &awb = context.activeState.awb;
+
+ const auto &awbEnable = controls.get(controls::AwbEnable);
+ if (awbEnable && *awbEnable != awb.autoEnabled) {
+ awb.autoEnabled = *awbEnable;
+
+ LOG(RkISP1Awb, Debug)
+ << (*awbEnable ? "Enabling" : "Disabling") << " AWB";
+ }
+
+ const auto &colourGains = controls.get(controls::ColourGains);
+ if (colourGains && !awb.autoEnabled) {
+ awb.gains.manual.red = (*colourGains)[0];
+ awb.gains.manual.blue = (*colourGains)[1];
+
+ LOG(RkISP1Awb, Debug)
+ << "Set colour gains to red: " << awb.gains.manual.red
+ << ", blue: " << awb.gains.manual.blue;
+ }
+
+ frameContext.awb.autoEnabled = awb.autoEnabled;
+
+ if (!awb.autoEnabled) {
+ frameContext.awb.gains.red = awb.gains.manual.red;
+ frameContext.awb.gains.green = 1.0;
+ frameContext.awb.gains.blue = awb.gains.manual.blue;
+ }
+}
+
+/**
* \copydoc libcamera::ipa::Algorithm::process
*/
-void Awb::process([[maybe_unused]] IPAContext &context,
- [[maybe_unused]] IPAFrameContext *frameCtx,
+void Awb::process(IPAContext &context,
+ [[maybe_unused]] const uint32_t frame,
+ IPAFrameContext &frameContext,
const rkisp1_stat_buffer *stats)
{
const rkisp1_cif_isp_stat *params = &stats->params;
const rkisp1_cif_isp_awb_stat *awb = &params->awb;
- IPAFrameContext &frameContext = context.frameContext;
+ IPAActiveState &activeState = context.activeState;
+ double greenMean;
+ double redMean;
+ double blueMean;
+
+ if (rgbMode_) {
+ greenMean = awb->awb_mean[0].mean_y_or_g;
+ redMean = awb->awb_mean[0].mean_cr_or_r;
+ blueMean = awb->awb_mean[0].mean_cb_or_b;
+ } else {
+ /* Get the YCbCr mean values */
+ double yMean = awb->awb_mean[0].mean_y_or_g;
+ double cbMean = awb->awb_mean[0].mean_cb_or_b;
+ double crMean = awb->awb_mean[0].mean_cr_or_r;
+
+ /*
+ * Convert from YCbCr to RGB.
+ * The hardware uses the following formulas:
+ * Y = 16 + 0.2500 R + 0.5000 G + 0.1094 B
+ * Cb = 128 - 0.1406 R - 0.2969 G + 0.4375 B
+ * Cr = 128 + 0.4375 R - 0.3750 G - 0.0625 B
+ *
+ * The inverse matrix is thus:
+ * [[ 1.1636, -0.0623,  1.6008 ]
+ *  [ 1.1636, -0.4045, -0.7949 ]
+ *  [ 1.1636,  1.9912, -0.0250 ]]
+ */
+ yMean -= 16;
+ cbMean -= 128;
+ crMean -= 128;
+ redMean = 1.1636 * yMean - 0.0623 * cbMean + 1.6008 * crMean;
+ greenMean = 1.1636 * yMean - 0.4045 * cbMean - 0.7949 * crMean;
+ blueMean = 1.1636 * yMean + 1.9912 * cbMean - 0.0250 * crMean;
+
+ /*
+ * Due to hardware rounding errors in the YCbCr means, the
+ * calculated RGB means may be negative. This would lead to
+ * negative gains, messing up calculation. Prevent this by
+ * clamping the means to positive values.
+ */
+ redMean = std::max(redMean, 0.0);
+ greenMean = std::max(greenMean, 0.0);
+ blueMean = std::max(blueMean, 0.0);
+ }
- /* Get the YCbCr mean values */
- double yMean = awb->awb_mean[0].mean_y_or_g;
- double crMean = awb->awb_mean[0].mean_cr_or_r;
- double cbMean = awb->awb_mean[0].mean_cb_or_b;
+ /*
+ * The ISP computes the AWB means after applying the colour gains, so
+ * divide by the gains that were used in order to recover the raw
+ * means from the sensor.
+ */
+ redMean /= frameContext.awb.gains.red;
+ greenMean /= frameContext.awb.gains.green;
+ blueMean /= frameContext.awb.gains.blue;
/*
- * Convert from YCbCr to RGB.
- * The hardware uses the following formulas:
- * Y = 16 + 0.2500 R + 0.5000 G + 0.1094 B
- * Cb = 128 - 0.1406 R - 0.2969 G + 0.4375 B
- * Cr = 128 + 0.4375 R - 0.3750 G - 0.0625 B
- *
- * The inverse matrix is thus:
- * [[1,1636, -0,0623, 1,6008]
- * [1,1636, -0,4045, -0,7949]
- * [1,1636, 1,9912, -0,0250]]
+ * If the means are too small, we don't have enough information to
+ * meaningfully calculate gains. Freeze the algorithm in that case.
*/
- yMean -= 16;
- cbMean -= 128;
- crMean -= 128;
- double redMean = 1.1636 * yMean - 0.0623 * cbMean + 1.6008 * crMean;
- double greenMean = 1.1636 * yMean - 0.4045 * cbMean - 0.7949 * crMean;
- double blueMean = 1.1636 * yMean + 1.9912 * cbMean - 0.0250 * crMean;
+ if (redMean < kMeanMinThreshold && greenMean < kMeanMinThreshold &&
+ blueMean < kMeanMinThreshold) {
+ frameContext.awb.temperatureK = activeState.awb.temperatureK;
+ return;
+ }
- /* Estimate the red and blue gains to apply in a grey world. */
- double redGain = greenMean / (redMean + 1);
- double blueGain = greenMean / (blueMean + 1);
+ activeState.awb.temperatureK = estimateCCT(redMean, greenMean, blueMean);
- /* Filter the values to avoid oscillations. */
- double speed = 0.2;
- redGain = speed * redGain + (1 - speed) * frameContext.awb.gains.red;
- blueGain = speed * blueGain + (1 - speed) * frameContext.awb.gains.blue;
+ /*
+ * Estimate the red and blue gains to apply in a grey world. The green
+ * gain is hardcoded to 1.0. Avoid divisions by zero by clamping the
+ * divisor to a minimum value of 1.0.
+ */
+ double redGain = greenMean / std::max(redMean, 1.0);
+ double blueGain = greenMean / std::max(blueMean, 1.0);
/*
- * Gain values are unsigned integer value, range 0 to 4 with 8 bit
- * fractional part.
+ * Clamp the gain values to the range supported by the hardware, which
+ * expresses gains as Q2.8 unsigned integer values. Set the minimum
+ * just above zero to avoid divisions by zero when computing the raw
+ * means in subsequent iterations.
*/
- frameContext.awb.gains.red = std::clamp(redGain, 0.0, 1023.0 / 256);
- frameContext.awb.gains.blue = std::clamp(blueGain, 0.0, 1023.0 / 256);
- /* Hardcode the green gain to 1.0. */
- frameContext.awb.gains.green = 1.0;
+ redGain = std::clamp(redGain, 1.0 / 256, 1023.0 / 256);
+ blueGain = std::clamp(blueGain, 1.0 / 256, 1023.0 / 256);
+
+ /* Filter the values to avoid oscillations. */
+ double speed = 0.2;
+ redGain = speed * redGain + (1 - speed) * activeState.awb.gains.automatic.red;
+ blueGain = speed * blueGain + (1 - speed) * activeState.awb.gains.automatic.blue;
+
+ activeState.awb.gains.automatic.red = redGain;
+ activeState.awb.gains.automatic.blue = blueGain;
+ activeState.awb.gains.automatic.green = 1.0;
- frameContext.awb.temperatureK = estimateCCT(redMean, greenMean, blueMean);
+ frameContext.awb.temperatureK = activeState.awb.temperatureK;
- LOG(RkISP1Awb, Debug) << "Gain found for red: " << context.frameContext.awb.gains.red
- << " and for blue: " << context.frameContext.awb.gains.blue;
+ LOG(RkISP1Awb, Debug) << std::showpoint
+ << "Means [" << redMean << ", " << greenMean << ", " << blueMean
+ << "], gains [" << activeState.awb.gains.automatic.red << ", "
+ << activeState.awb.gains.automatic.green << ", "
+ << activeState.awb.gains.automatic.blue << "], temp "
+ << frameContext.awb.temperatureK << "K";
}
REGISTER_IPA_ALGORITHM(Awb, "Awb")
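
As a worked example of the grey-world estimation implemented above, the sketch below walks through the gain computation with hypothetical channel means; all numbers are illustrative:

    #include <algorithm>

    void greyWorldExample()
    {
    	/* Raw channel means, after dividing out the previous gains. */
    	double redMean = 50.0, greenMean = 100.0, blueMean = 40.0;

    	/* Grey world: scale red and blue to match the green mean. */
    	double redGain = greenMean / std::max(redMean, 1.0);	/* 2.0 */
    	double blueGain = greenMean / std::max(blueMean, 1.0);	/* 2.5 */

    	/* Clamp to the Q2.8 hardware range [1/256, 1023/256 = 3.996]. */
    	redGain = std::clamp(redGain, 1.0 / 256, 1023.0 / 256);
    	blueGain = std::clamp(blueGain, 1.0 / 256, 1023.0 / 256);

    	/*
    	 * IIR-filter against the previous automatic gain (speed 0.2):
    	 * with a previous red gain of 1.8, the result is
    	 * 0.2 * 2.0 + 0.8 * 1.8 = 1.84, converging over several frames.
    	 */
    	double previousRed = 1.8;
    	redGain = 0.2 * redGain + 0.8 * previousRed;
    	static_cast<void>(blueGain);
    }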
diff --git a/src/ipa/rkisp1/algorithms/awb.h b/src/ipa/rkisp1/algorithms/awb.h
index 667a8beb..d76b5382 100644
--- a/src/ipa/rkisp1/algorithms/awb.h
+++ b/src/ipa/rkisp1/algorithms/awb.h
@@ -16,16 +16,24 @@ namespace ipa::rkisp1::algorithms {
class Awb : public Algorithm
{
public:
- Awb() = default;
+ Awb();
~Awb() = default;
int configure(IPAContext &context, const IPACameraSensorInfo &configInfo) override;
- void prepare(IPAContext &context, rkisp1_params_cfg *params) override;
- void process(IPAContext &context, IPAFrameContext *frameCtx,
+ void prepare(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
+ rkisp1_params_cfg *params) override;
+ void queueRequest(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
+ const ControlList &controls) override;
+ void process(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameCtx,
const rkisp1_stat_buffer *stats) override;
private:
uint32_t estimateCCT(double red, double green, double blue);
+
+ bool rgbMode_;
};
} /* namespace ipa::rkisp1::algorithms */
diff --git a/src/ipa/rkisp1/algorithms/blc.cpp b/src/ipa/rkisp1/algorithms/blc.cpp
index a58569fa..15324fb1 100644
--- a/src/ipa/rkisp1/algorithms/blc.cpp
+++ b/src/ipa/rkisp1/algorithms/blc.cpp
@@ -65,10 +65,12 @@ int BlackLevelCorrection::init([[maybe_unused]] IPAContext &context,
/**
* \copydoc libcamera::ipa::Algorithm::prepare
*/
-void BlackLevelCorrection::prepare(IPAContext &context,
+void BlackLevelCorrection::prepare([[maybe_unused]] IPAContext &context,
+ const uint32_t frame,
+ [[maybe_unused]] IPAFrameContext &frameContext,
rkisp1_params_cfg *params)
{
- if (context.frameContext.frameCount > 0)
+ if (frame > 0)
return;
if (!tuningParameters_)
diff --git a/src/ipa/rkisp1/algorithms/blc.h b/src/ipa/rkisp1/algorithms/blc.h
index 5fc3a80f..0b1a2d43 100644
--- a/src/ipa/rkisp1/algorithms/blc.h
+++ b/src/ipa/rkisp1/algorithms/blc.h
@@ -20,7 +20,9 @@ public:
~BlackLevelCorrection() = default;
int init(IPAContext &context, const YamlObject &tuningData) override;
- void prepare(IPAContext &context, rkisp1_params_cfg *params) override;
+ void prepare(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
+ rkisp1_params_cfg *params) override;
private:
bool tuningParameters_;
diff --git a/src/ipa/rkisp1/algorithms/cproc.cpp b/src/ipa/rkisp1/algorithms/cproc.cpp
index bca5ab69..eaa56c37 100644
--- a/src/ipa/rkisp1/algorithms/cproc.cpp
+++ b/src/ipa/rkisp1/algorithms/cproc.cpp
@@ -38,52 +38,66 @@ LOG_DEFINE_CATEGORY(RkISP1CProc)
*/
void ColorProcessing::queueRequest(IPAContext &context,
[[maybe_unused]] const uint32_t frame,
+ IPAFrameContext &frameContext,
const ControlList &controls)
{
- auto &cproc = context.frameContext.cproc;
+ auto &cproc = context.activeState.cproc;
+ bool update = false;
const auto &brightness = controls.get(controls::Brightness);
if (brightness) {
- cproc.brightness = std::clamp<int>(std::lround(*brightness * 128), -128, 127);
- cproc.updateParams = true;
+ int value = std::clamp<int>(std::lround(*brightness * 128), -128, 127);
+ if (cproc.brightness != value) {
+ cproc.brightness = value;
+ update = true;
+ }
- LOG(RkISP1CProc, Debug) << "Set brightness to " << *brightness;
+ LOG(RkISP1CProc, Debug) << "Set brightness to " << value;
}
const auto &contrast = controls.get(controls::Contrast);
if (contrast) {
- cproc.contrast = std::clamp<int>(std::lround(*contrast * 128), 0, 255);
- cproc.updateParams = true;
+ int value = std::clamp<int>(std::lround(*contrast * 128), 0, 255);
+ if (cproc.contrast != value) {
+ cproc.contrast = value;
+ update = true;
+ }
- LOG(RkISP1CProc, Debug) << "Set contrast to " << *contrast;
+ LOG(RkISP1CProc, Debug) << "Set contrast to " << value;
}
const auto saturation = controls.get(controls::Saturation);
if (saturation) {
- cproc.saturation = std::clamp<int>(std::lround(*saturation * 128), 0, 255);
- cproc.updateParams = true;
+ int value = std::clamp<int>(std::lround(*saturation * 128), 0, 255);
+ if (cproc.saturation != value) {
+ cproc.saturation = value;
+ update = true;
+ }
- LOG(RkISP1CProc, Debug) << "Set saturation to " << *saturation;
+ LOG(RkISP1CProc, Debug) << "Set saturation to " << value;
}
+
+ frameContext.cproc.brightness = cproc.brightness;
+ frameContext.cproc.contrast = cproc.contrast;
+ frameContext.cproc.saturation = cproc.saturation;
+ frameContext.cproc.update = update;
}
/**
* \copydoc libcamera::ipa::Algorithm::prepare
*/
-void ColorProcessing::prepare(IPAContext &context,
+void ColorProcessing::prepare([[maybe_unused]] IPAContext &context,
+ [[maybe_unused]] const uint32_t frame,
+ IPAFrameContext &frameContext,
rkisp1_params_cfg *params)
{
- auto &cproc = context.frameContext.cproc;
-
/* Check if the algorithm configuration has been updated. */
- if (!cproc.updateParams)
+ if (!frameContext.cproc.update)
return;
- cproc.updateParams = false;
-
- params->others.cproc_config.brightness = cproc.brightness;
- params->others.cproc_config.contrast = cproc.contrast;
- params->others.cproc_config.sat = cproc.saturation;
+ params->others.cproc_config.brightness = frameContext.cproc.brightness;
+ params->others.cproc_config.contrast = frameContext.cproc.contrast;
+ params->others.cproc_config.sat = frameContext.cproc.saturation;
params->module_en_update |= RKISP1_CIF_ISP_MODULE_CPROC;
params->module_ens |= RKISP1_CIF_ISP_MODULE_CPROC;
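
The float-to-register mapping used by queueRequest() above is a scale by 128 followed by clamping; the control ranges match the rkisp1Controls entries registered later in this patch. A small illustration with hypothetical input values:

    #include <algorithm>
    #include <cmath>

    void cprocMappingExample()
    {
    	/* Brightness [-1.0, 0.993] maps to the signed range [-128, 127]. */
    	int brightness = std::clamp<int>(std::lround(0.5f * 128), -128, 127); /* 64 */

    	/*
    	 * Contrast and saturation [0.0, 1.993] map to [0, 255], with 128
    	 * (an input of 1.0) as the neutral value.
    	 */
    	int contrast = std::clamp<int>(std::lround(1.0f * 128), 0, 255); /* 128 */

    	static_cast<void>(brightness);
    	static_cast<void>(contrast);
    }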
diff --git a/src/ipa/rkisp1/algorithms/cproc.h b/src/ipa/rkisp1/algorithms/cproc.h
index 4b7e4064..ba6e901a 100644
--- a/src/ipa/rkisp1/algorithms/cproc.h
+++ b/src/ipa/rkisp1/algorithms/cproc.h
@@ -22,8 +22,11 @@ public:
~ColorProcessing() = default;
void queueRequest(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
const ControlList &controls) override;
- void prepare(IPAContext &context, rkisp1_params_cfg *params) override;
+ void prepare(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
+ rkisp1_params_cfg *params) override;
};
} /* namespace ipa::rkisp1::algorithms */
diff --git a/src/ipa/rkisp1/algorithms/dpcc.cpp b/src/ipa/rkisp1/algorithms/dpcc.cpp
index 69bc651e..d2a015d0 100644
--- a/src/ipa/rkisp1/algorithms/dpcc.cpp
+++ b/src/ipa/rkisp1/algorithms/dpcc.cpp
@@ -231,10 +231,12 @@ int DefectPixelClusterCorrection::init([[maybe_unused]] IPAContext &context,
/**
* \copydoc libcamera::ipa::Algorithm::prepare
*/
-void DefectPixelClusterCorrection::prepare(IPAContext &context,
+void DefectPixelClusterCorrection::prepare([[maybe_unused]] IPAContext &context,
+ const uint32_t frame,
+ [[maybe_unused]] IPAFrameContext &frameContext,
rkisp1_params_cfg *params)
{
- if (context.frameContext.frameCount > 0)
+ if (frame > 0)
return;
if (!initialized_)
diff --git a/src/ipa/rkisp1/algorithms/dpcc.h b/src/ipa/rkisp1/algorithms/dpcc.h
index a363f7be..894c0249 100644
--- a/src/ipa/rkisp1/algorithms/dpcc.h
+++ b/src/ipa/rkisp1/algorithms/dpcc.h
@@ -20,7 +20,9 @@ public:
~DefectPixelClusterCorrection() = default;
int init(IPAContext &context, const YamlObject &tuningData) override;
- void prepare(IPAContext &context, rkisp1_params_cfg *params) override;
+ void prepare(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
+ rkisp1_params_cfg *params) override;
private:
bool initialized_;
diff --git a/src/ipa/rkisp1/algorithms/dpf.cpp b/src/ipa/rkisp1/algorithms/dpf.cpp
new file mode 100644
index 00000000..f7bc371d
--- /dev/null
+++ b/src/ipa/rkisp1/algorithms/dpf.cpp
@@ -0,0 +1,265 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021-2022, Ideas On Board
+ *
+ * dpf.cpp - RkISP1 Denoise Pre-Filter control
+ */
+
+#include "dpf.h"
+
+#include <cmath>
+
+#include <libcamera/base/log.h>
+
+#include <libcamera/control_ids.h>
+
+#include "linux/rkisp1-config.h"
+
+/**
+ * \file dpf.h
+ */
+
+namespace libcamera {
+
+namespace ipa::rkisp1::algorithms {
+
+/**
+ * \class Dpf
+ * \brief RkISP1 Denoise Pre-Filter control
+ *
+ * The denoise pre-filter algorithm is a bilateral filter which combines a
+ * range filter and a domain filter. The denoise pre-filter is applied before
+ * demosaicing.
+ */
+
+LOG_DEFINE_CATEGORY(RkISP1Dpf)
+
+Dpf::Dpf()
+ : initialized_(false), config_({}), strengthConfig_({})
+{
+}
+
+/**
+ * \copydoc libcamera::ipa::Algorithm::init
+ */
+int Dpf::init([[maybe_unused]] IPAContext &context,
+ const YamlObject &tuningData)
+{
+ std::vector<uint8_t> values;
+
+ /*
+ * The domain filter is configured with a 9x9 kernel for the green
+ * pixels, and a 13x9 or 9x9 kernel for the red and blue pixels.
+ */
+ const YamlObject &dFObject = tuningData["DomainFilter"];
+
+ /*
+ * For the green component, we have the 9x9 kernel specified
+ * as 6 coefficients:
+ * Y
+ * ^
+ * 4 | 6 5 4 5 6
+ * 3 | 5 3 3 5
+ * 2 | 5 3 2 3 5
+ * 1 | 3 1 1 3
+ * 0 - 4 2 0 2 4
+ * -1 | 3 1 1 3
+ * -2 | 5 3 2 3 5
+ * -3 | 5 3 3 5
+ * -4 | 6 5 4 5 6
+ * +---------|--------> X
+ * -4....-1 0 1 2 3 4
+ */
+ values = dFObject["g"].getList<uint8_t>().value_or(utils::defopt);
+ if (values.size() != RKISP1_CIF_ISP_DPF_MAX_SPATIAL_COEFFS) {
+ LOG(RkISP1Dpf, Error)
+ << "Invalid 'DomainFilter:g': expected "
+ << RKISP1_CIF_ISP_DPF_MAX_SPATIAL_COEFFS
+ << " elements, got " << values.size();
+ return -EINVAL;
+ }
+
+ std::copy_n(values.begin(), values.size(),
+ std::begin(config_.g_flt.spatial_coeff));
+
+ config_.g_flt.gr_enable = true;
+ config_.g_flt.gb_enable = true;
+
+ /*
+ * For the red and blue components, we have the 13x9 kernel specified
+ * as 6 coefficients:
+ *
+ * Y
+ * ^
+ * 4 | 6 5 4 3 4 5 6
+ * |
+ * 2 | 5 4 2 1 2 4 5
+ * |
+ * 0 - 5 3 1 0 1 3 5
+ * |
+ * -2 | 5 4 2 1 2 4 5
+ * |
+ * -4 | 6 5 4 3 4 5 6
+ * +-------------|------------> X
+ * -6 -4 -2 0 2 4 6
+ *
+ * For a 9x9 kernel, columns -6 and 6 are dropped, so coefficient
+ * number 6 is not used.
+ */
+ values = dFObject["rb"].getList<uint8_t>().value_or(utils::defopt);
+ if (values.size() != RKISP1_CIF_ISP_DPF_MAX_SPATIAL_COEFFS &&
+ values.size() != RKISP1_CIF_ISP_DPF_MAX_SPATIAL_COEFFS - 1) {
+ LOG(RkISP1Dpf, Error)
+ << "Invalid 'DomainFilter:rb': expected "
+ << RKISP1_CIF_ISP_DPF_MAX_SPATIAL_COEFFS - 1
+ << " or " << RKISP1_CIF_ISP_DPF_MAX_SPATIAL_COEFFS
+ << " elements, got " << values.size();
+ return -EINVAL;
+ }
+
+ config_.rb_flt.fltsize = values.size() == RKISP1_CIF_ISP_DPF_MAX_SPATIAL_COEFFS
+ ? RKISP1_CIF_ISP_DPF_RB_FILTERSIZE_13x9
+ : RKISP1_CIF_ISP_DPF_RB_FILTERSIZE_9x9;
+
+ std::copy_n(values.begin(), values.size(),
+ std::begin(config_.rb_flt.spatial_coeff));
+
+ config_.rb_flt.r_enable = true;
+ config_.rb_flt.b_enable = true;
+
+ /*
+ * The range kernel is configured with a noise level lookup table (NLL)
+ * which stores a piecewise linear function that characterizes the
+ * sensor noise profile as a noise level function curve (NLF).
+ */
+ const YamlObject &rFObject = tuningData["NoiseLevelFunction"];
+
+ std::vector<uint16_t> nllValues;
+ nllValues = rFObject["coeff"].getList<uint16_t>().value_or(utils::defopt);
+ if (nllValues.size() != RKISP1_CIF_ISP_DPF_MAX_NLF_COEFFS) {
+ LOG(RkISP1Dpf, Error)
+ << "Invalid 'NoiseLevelFunction:coeff': expected "
+ << RKISP1_CIF_ISP_DPF_MAX_NLF_COEFFS
+ << " elements, got " << nllValues.size();
+ return -EINVAL;
+ }
+
+ std::copy_n(nllValues.begin(), nllValues.size(),
+ std::begin(config_.nll.coeff));
+
+ std::string scaleMode = rFObject["scale-mode"].get<std::string>("");
+ if (scaleMode == "linear") {
+ config_.nll.scale_mode = RKISP1_CIF_ISP_NLL_SCALE_LINEAR;
+ } else if (scaleMode == "logarithmic") {
+ config_.nll.scale_mode = RKISP1_CIF_ISP_NLL_SCALE_LOGARITHMIC;
+ } else {
+ LOG(RkISP1Dpf, Error)
+ << "Invalid 'NoiseLevelFunction:scale-mode': expected "
+ << "'linear' or 'logarithmic' value, got "
+ << scaleMode;
+ return -EINVAL;
+ }
+
+ const YamlObject &fSObject = tuningData["FilterStrength"];
+
+ strengthConfig_.r = fSObject["r"].get<uint16_t>(64);
+ strengthConfig_.g = fSObject["g"].get<uint16_t>(64);
+ strengthConfig_.b = fSObject["b"].get<uint16_t>(64);
+
+ initialized_ = true;
+
+ return 0;
+}
+
+/**
+ * \copydoc libcamera::ipa::Algorithm::queueRequest
+ */
+void Dpf::queueRequest(IPAContext &context,
+ [[maybe_unused]] const uint32_t frame,
+ IPAFrameContext &frameContext,
+ const ControlList &controls)
+{
+ auto &dpf = context.activeState.dpf;
+ bool update = false;
+
+ const auto &denoise = controls.get(controls::draft::NoiseReductionMode);
+ if (denoise) {
+ LOG(RkISP1Dpf, Debug) << "Set denoise to " << *denoise;
+
+ switch (*denoise) {
+ case controls::draft::NoiseReductionModeOff:
+ if (dpf.denoise) {
+ dpf.denoise = false;
+ update = true;
+ }
+ break;
+ case controls::draft::NoiseReductionModeMinimal:
+ case controls::draft::NoiseReductionModeHighQuality:
+ case controls::draft::NoiseReductionModeFast:
+ if (!dpf.denoise) {
+ dpf.denoise = true;
+ update = true;
+ }
+ break;
+ default:
+ LOG(RkISP1Dpf, Error)
+ << "Unsupported denoise value "
+ << *denoise;
+ break;
+ }
+ }
+
+ frameContext.dpf.denoise = dpf.denoise;
+ frameContext.dpf.update = update;
+}
+
+/**
+ * \copydoc libcamera::ipa::Algorithm::prepare
+ */
+void Dpf::prepare(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext, rkisp1_params_cfg *params)
+{
+ if (!initialized_)
+ return;
+
+ if (frame == 0) {
+ params->others.dpf_config = config_;
+ params->others.dpf_strength_config = strengthConfig_;
+
+ const auto &awb = context.configuration.awb;
+ const auto &lsc = context.configuration.lsc;
+ auto &mode = params->others.dpf_config.gain.mode;
+
+ /*
+ * The DPF needs to take into account the total amount of
+ * digital gain, which comes from the AWB and LSC modules. The
+ * DPF hardware can be programmed with a digital gain value
+ * manually, but can also use the gains supplied by the AWB and
+ * LSC modules automatically when they are enabled. Use that
+ * mode of operation as it simplifies control of the DPF.
+ */
+ if (awb.enabled && lsc.enabled)
+ mode = RKISP1_CIF_ISP_DPF_GAIN_USAGE_AWB_LSC_GAINS;
+ else if (awb.enabled)
+ mode = RKISP1_CIF_ISP_DPF_GAIN_USAGE_AWB_GAINS;
+ else if (lsc.enabled)
+ mode = RKISP1_CIF_ISP_DPF_GAIN_USAGE_LSC_GAINS;
+ else
+ mode = RKISP1_CIF_ISP_DPF_GAIN_USAGE_DISABLED;
+
+ params->module_cfg_update |= RKISP1_CIF_ISP_MODULE_DPF |
+ RKISP1_CIF_ISP_MODULE_DPF_STRENGTH;
+ }
+
+ if (frameContext.dpf.update) {
+ params->module_en_update |= RKISP1_CIF_ISP_MODULE_DPF;
+ if (frameContext.dpf.denoise)
+ params->module_ens |= RKISP1_CIF_ISP_MODULE_DPF;
+ }
+}
+
+REGISTER_IPA_ALGORITHM(Dpf, "Dpf")
+
+} /* namespace ipa::rkisp1::algorithms */
+
+} /* namespace libcamera */
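
For intuition, a bilateral filter weights each neighbour pixel by the product of a spatial (domain) term and an intensity (range) term. The rough scalar sketch below is purely illustrative: the hardware uses the tabulated spatial coefficients and the noise level lookup table rather than an analytic Gaussian:

    #include <cmath>

    /* Illustrative bilateral weight for one neighbour pixel. */
    double bilateralWeight(double spatialCoeff, double centre, double neighbour,
    		       double noiseLevel)
    {
    	/* Domain term: position-dependent, precomputed in tuning data. */
    	double domain = spatialCoeff;

    	/*
    	 * Range term: penalise intensity differences that exceed the
    	 * sensor noise level taken from the noise level function.
    	 */
    	double diff = neighbour - centre;
    	double range = std::exp(-(diff * diff) / (2.0 * noiseLevel * noiseLevel));

    	return domain * range;
    }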
diff --git a/src/ipa/rkisp1/algorithms/dpf.h b/src/ipa/rkisp1/algorithms/dpf.h
new file mode 100644
index 00000000..e232cad4
--- /dev/null
+++ b/src/ipa/rkisp1/algorithms/dpf.h
@@ -0,0 +1,39 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021-2022, Ideas On Board
+ *
+ * dpf.h - RkISP1 Denoise Pre-Filter control
+ */
+
+#pragma once
+
+#include <sys/types.h>
+
+#include "algorithm.h"
+
+namespace libcamera {
+
+namespace ipa::rkisp1::algorithms {
+
+class Dpf : public Algorithm
+{
+public:
+ Dpf();
+ ~Dpf() = default;
+
+ int init(IPAContext &context, const YamlObject &tuningData) override;
+ void queueRequest(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
+ const ControlList &controls) override;
+ void prepare(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
+ rkisp1_params_cfg *params) override;
+
+private:
+ bool initialized_;
+ struct rkisp1_cif_isp_dpf_config config_;
+ struct rkisp1_cif_isp_dpf_strength_config strengthConfig_;
+};
+
+} /* namespace ipa::rkisp1::algorithms */
+} /* namespace libcamera */
diff --git a/src/ipa/rkisp1/algorithms/filter.cpp b/src/ipa/rkisp1/algorithms/filter.cpp
index 8ca10fd1..4b89c05a 100644
--- a/src/ipa/rkisp1/algorithms/filter.cpp
+++ b/src/ipa/rkisp1/algorithms/filter.cpp
@@ -44,14 +44,20 @@ static constexpr uint32_t kFiltModeDefault = 0x000004f2;
*/
void Filter::queueRequest(IPAContext &context,
[[maybe_unused]] const uint32_t frame,
+ IPAFrameContext &frameContext,
const ControlList &controls)
{
- auto &filter = context.frameContext.filter;
+ auto &filter = context.activeState.filter;
+ bool update = false;
const auto &sharpness = controls.get(controls::Sharpness);
if (sharpness) {
- filter.sharpness = std::round(std::clamp(*sharpness, 0.0f, 10.0f));
- filter.updateParams = true;
+ unsigned int value = std::round(std::clamp(*sharpness, 0.0f, 10.0f));
+
+ if (filter.sharpness != value) {
+ filter.sharpness = value;
+ update = true;
+ }
LOG(RkISP1Filter, Debug) << "Set sharpness to " << *sharpness;
}
@@ -62,39 +68,48 @@ void Filter::queueRequest(IPAContext &context,
switch (*denoise) {
case controls::draft::NoiseReductionModeOff:
- filter.denoise = 0;
- filter.updateParams = true;
+ if (filter.denoise != 0) {
+ filter.denoise = 0;
+ update = true;
+ }
break;
case controls::draft::NoiseReductionModeMinimal:
- filter.denoise = 1;
- filter.updateParams = true;
+ if (filter.denoise != 1) {
+ filter.denoise = 1;
+ update = true;
+ }
break;
case controls::draft::NoiseReductionModeHighQuality:
case controls::draft::NoiseReductionModeFast:
- filter.denoise = 3;
- filter.updateParams = true;
+ if (filter.denoise != 3) {
+ filter.denoise = 3;
+ update = true;
+ }
break;
default:
LOG(RkISP1Filter, Error)
<< "Unsupported denoise value "
<< *denoise;
+ break;
}
}
+
+ frameContext.filter.denoise = filter.denoise;
+ frameContext.filter.sharpness = filter.sharpness;
+ frameContext.filter.update = update;
}
/**
* \copydoc libcamera::ipa::Algorithm::prepare
*/
-void Filter::prepare(IPAContext &context, rkisp1_params_cfg *params)
+void Filter::prepare([[maybe_unused]] IPAContext &context,
+ [[maybe_unused]] const uint32_t frame,
+ IPAFrameContext &frameContext, rkisp1_params_cfg *params)
{
- auto &filter = context.frameContext.filter;
-
/* Check if the algorithm configuration has been updated. */
- if (!filter.updateParams)
+ if (!frameContext.filter.update)
return;
- filter.updateParams = false;
-
static constexpr uint16_t filt_fac_sh0[] = {
0x04, 0x07, 0x0a, 0x0c, 0x10, 0x14, 0x1a, 0x1e, 0x24, 0x2a, 0x30
};
@@ -143,8 +158,8 @@ void Filter::prepare(IPAContext &context, rkisp1_params_cfg *params)
0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3
};
- uint8_t denoise = filter.denoise;
- uint8_t sharpness = filter.sharpness;
+ uint8_t denoise = frameContext.filter.denoise;
+ uint8_t sharpness = frameContext.filter.sharpness;
auto &flt_config = params->others.flt_config;
flt_config.fac_sh0 = filt_fac_sh0[sharpness];
diff --git a/src/ipa/rkisp1/algorithms/filter.h b/src/ipa/rkisp1/algorithms/filter.h
index 9eb170eb..3fd882ea 100644
--- a/src/ipa/rkisp1/algorithms/filter.h
+++ b/src/ipa/rkisp1/algorithms/filter.h
@@ -22,8 +22,11 @@ public:
~Filter() = default;
void queueRequest(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
const ControlList &controls) override;
- void prepare(IPAContext &context, rkisp1_params_cfg *params) override;
+ void prepare(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
+ rkisp1_params_cfg *params) override;
};
} /* namespace ipa::rkisp1::algorithms */
diff --git a/src/ipa/rkisp1/algorithms/gsl.cpp b/src/ipa/rkisp1/algorithms/gsl.cpp
index 2fd1a23d..9cbad020 100644
--- a/src/ipa/rkisp1/algorithms/gsl.cpp
+++ b/src/ipa/rkisp1/algorithms/gsl.cpp
@@ -118,10 +118,12 @@ int GammaSensorLinearization::init([[maybe_unused]] IPAContext &context,
/**
* \copydoc libcamera::ipa::Algorithm::prepare
*/
-void GammaSensorLinearization::prepare(IPAContext &context,
+void GammaSensorLinearization::prepare([[maybe_unused]] IPAContext &context,
+ const uint32_t frame,
+ [[maybe_unused]] IPAFrameContext &frameContext,
rkisp1_params_cfg *params)
{
- if (context.frameContext.frameCount > 0)
+ if (frame > 0)
return;
if (!initialized_)
diff --git a/src/ipa/rkisp1/algorithms/gsl.h b/src/ipa/rkisp1/algorithms/gsl.h
index db287dc2..5024b683 100644
--- a/src/ipa/rkisp1/algorithms/gsl.h
+++ b/src/ipa/rkisp1/algorithms/gsl.h
@@ -20,7 +20,9 @@ public:
~GammaSensorLinearization() = default;
int init(IPAContext &context, const YamlObject &tuningData) override;
- void prepare(IPAContext &context, rkisp1_params_cfg *params) override;
+ void prepare(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
+ rkisp1_params_cfg *params) override;
private:
bool initialized_;
diff --git a/src/ipa/rkisp1/algorithms/lsc.cpp b/src/ipa/rkisp1/algorithms/lsc.cpp
index 05c8c0da..44245caa 100644
--- a/src/ipa/rkisp1/algorithms/lsc.cpp
+++ b/src/ipa/rkisp1/algorithms/lsc.cpp
@@ -58,7 +58,7 @@ static std::vector<double> parseSizes(const YamlObject &tuningData,
* prevent an exact match (further adjustments will be performed in
* LensShadingCorrection::prepare()).
*/
- float sum = std::accumulate(sizes.begin(), sizes.end(), 0.0f);
+ double sum = std::accumulate(sizes.begin(), sizes.end(), 0.0);
if (sum < 0.495 || sum > 0.505) {
LOG(RkISP1Lsc, Error)
<< "Invalid '" << prop << "' values: sum of the elements"
@@ -120,12 +120,23 @@ int LensShadingCorrection::init([[maybe_unused]] IPAContext &context,
}
/**
+ * \copydoc libcamera::ipa::Algorithm::configure
+ */
+int LensShadingCorrection::configure(IPAContext &context,
+ [[maybe_unused]] const IPACameraSensorInfo &configInfo)
+{
+ context.configuration.lsc.enabled = initialized_;
+ return 0;
+}
+
+/**
* \copydoc libcamera::ipa::Algorithm::prepare
*/
-void LensShadingCorrection::prepare(IPAContext &context,
+void LensShadingCorrection::prepare(IPAContext &context, const uint32_t frame,
+ [[maybe_unused]] IPAFrameContext &frameContext,
rkisp1_params_cfg *params)
{
- if (context.frameContext.frameCount > 0)
+ if (frame > 0)
return;
if (!initialized_)
diff --git a/src/ipa/rkisp1/algorithms/lsc.h b/src/ipa/rkisp1/algorithms/lsc.h
index fdb2ec1d..da957d3e 100644
--- a/src/ipa/rkisp1/algorithms/lsc.h
+++ b/src/ipa/rkisp1/algorithms/lsc.h
@@ -20,7 +20,10 @@ public:
~LensShadingCorrection() = default;
int init(IPAContext &context, const YamlObject &tuningData) override;
- void prepare(IPAContext &context, rkisp1_params_cfg *params) override;
+ int configure(IPAContext &context, const IPACameraSensorInfo &configInfo) override;
+ void prepare(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
+ rkisp1_params_cfg *params) override;
private:
bool initialized_;
diff --git a/src/ipa/rkisp1/algorithms/meson.build b/src/ipa/rkisp1/algorithms/meson.build
index e48974b4..93a48329 100644
--- a/src/ipa/rkisp1/algorithms/meson.build
+++ b/src/ipa/rkisp1/algorithms/meson.build
@@ -6,6 +6,7 @@ rkisp1_ipa_algorithms = files([
'blc.cpp',
'cproc.cpp',
'dpcc.cpp',
+ 'dpf.cpp',
'filter.cpp',
'gsl.cpp',
'lsc.cpp',
diff --git a/src/ipa/rkisp1/data/ov5640.yaml b/src/ipa/rkisp1/data/ov5640.yaml
index 45d4bb77..3dc369ac 100644
--- a/src/ipa/rkisp1/data/ov5640.yaml
+++ b/src/ipa/rkisp1/data/ov5640.yaml
@@ -156,5 +156,20 @@ algorithms:
rnd-offsets:
green: 2
red-blue: 2
+ - Dpf:
+ DomainFilter:
+ g: [ 16, 16, 16, 16, 16, 16 ]
+ rb: [ 16, 16, 16, 16, 16, 16 ]
+ NoiseLevelFunction:
+ coeff: [
+ 1023, 1023, 1023, 1023, 1023, 1023, 1023, 1023,
+ 1023, 1023, 1023, 1023, 1023, 1023, 1023, 1023,
+ 1023
+ ]
+ scale-mode: "linear"
+ FilterStrength:
+ r: 64
+ g: 64
+ b: 64
- Filter:
...
diff --git a/src/ipa/rkisp1/ipa_context.cpp b/src/ipa/rkisp1/ipa_context.cpp
index ef8bb8e9..b00dc29c 100644
--- a/src/ipa/rkisp1/ipa_context.cpp
+++ b/src/ipa/rkisp1/ipa_context.cpp
@@ -25,37 +25,6 @@ namespace libcamera::ipa::rkisp1 {
*/
/**
- * \struct IPAFrameContext
- * \brief Per-frame context for algorithms
- *
- * The frame context stores data specific to a single frame processed by the
- * IPA. Each frame processed by the IPA has a context associated with it,
- * accessible through the IPAContext structure.
- *
- * \todo Detail how to access contexts for a particular frame
- *
- * Each of the fields in the frame context belongs to either a specific
- * algorithm, or to the top-level IPA module. A field may be read by any
- * algorithm, but should only be written by its owner.
- */
-
-/**
- * \struct IPAContext
- * \brief Global IPA context data shared between all algorithms
- *
- * \var IPAContext::configuration
- * \brief The IPA session configuration, immutable during the session
- *
- * \var IPAContext::frameContext
- * \brief The frame context for the frame being processed
- *
- * \todo While the frame context is supposed to be per-frame, this
- * single frame context stores data related to both the current frame
- * and the previous frames, with fields being updated as the algorithms
- * are run. This needs to be turned into real per-frame data storage.
- */
-
-/**
* \var IPASessionConfiguration::agc
* \brief AGC parameters configuration of the IPA
*
@@ -87,6 +56,21 @@ namespace libcamera::ipa::rkisp1 {
*
* \var IPASessionConfiguration::awb.measureWindow
* \brief AWB measure window
+ *
+ * \var IPASessionConfiguration::awb.enabled
+ * \brief Indicates if the AWB hardware is enabled and applies colour gains
+ *
+ * The AWB module of the ISP applies colour gains and computes statistics. It is
+ * enabled when the AWB algorithm is loaded, regardless of whether the algorithm
+ * operates in manual or automatic mode.
+ */
+
+/**
+ * \var IPASessionConfiguration::lsc
+ * \brief Lens Shading Correction configuration of the IPA
+ *
+ * \var IPASessionConfiguration::lsc.enabled
+ * \brief Indicates if the LSC hardware is enabled
*/
/**
@@ -101,11 +85,155 @@ namespace libcamera::ipa::rkisp1 {
*/
/**
+ * \struct IPAActiveState
+ * \brief Active state for algorithms
+ *
+ * The active state contains all algorithm-specific data that needs to be
+ * maintained by algorithms across frames. Unlike the session configuration,
+ * the active state is mutable and constantly updated by algorithms. The active
+ * state is accessible through the IPAContext structure.
+ *
+ * The active state stores two distinct categories of information:
+ *
+ * - The consolidated value of all algorithm controls. Requests passed to
+ * the queueRequest() function store values for controls that the
+ * application wants to modify for that particular frame, and the
+ * queueRequest() function updates the active state with those values.
+ * The active state thus contains a consolidated view of the value of all
+ * controls handled by the algorithm.
+ *
+ * - The value of parameters computed by the algorithm when running in auto
+ * mode. Algorithms running in auto mode compute new parameters every
+ * time statistics buffers are received (either synchronously, or
+ * possibly in a background thread). The latest computed value of those
+ * parameters is stored in the active state in the process() function.
+ *
+ * Each of the members in the active state belongs to a specific algorithm. A
+ * member may be read by any algorithm, but shall only be written by its owner.
+ */
+
+/**
+ * \var IPAActiveState::agc
+ * \brief State for the Automatic Gain Control algorithm
+ *
+ * The exposure and gain are the latest values computed by the AGC algorithm.
+ *
+ * \var IPAActiveState::agc.exposure
+ * \brief Exposure time expressed as a number of lines
+ *
+ * \var IPAActiveState::agc.gain
+ * \brief Analogue gain multiplier
+ */
+
+/**
+ * \var IPAActiveState::awb
+ * \brief State for the Automatic White Balance algorithm
+ *
+ * \struct IPAActiveState::awb.gains
+ * \brief White balance gains
+ *
+ * \struct IPAActiveState::awb.gains.manual
+ * \brief Manual white balance gains (set through requests)
+ *
+ * \var IPAActiveState::awb.gains.manual.red
+ * \brief Manual white balance gain for R channel
+ *
+ * \var IPAActiveState::awb.gains.manual.green
+ * \brief Manual white balance gain for G channel
+ *
+ * \var IPAActiveState::awb.gains.manual.blue
+ * \brief Manual white balance gain for B channel
+ *
+ * \struct IPAActiveState::awb.gains.automatic
+ * \brief Automatic white balance gains (computed by the algorithm)
+ *
+ * \var IPAActiveState::awb.gains.automatic.red
+ * \brief Automatic white balance gain for R channel
+ *
+ * \var IPAActiveState::awb.gains.automatic.green
+ * \brief Automatic white balance gain for G channel
+ *
+ * \var IPAActiveState::awb.gains.automatic.blue
+ * \brief Automatic white balance gain for B channel
+ *
+ * \var IPAActiveState::awb.temperatureK
+ * \brief Estimated color temperature
+ *
+ * \var IPAActiveState::awb.autoEnabled
+ * \brief Whether the Auto White Balance algorithm is enabled
+ */
+
+/**
+ * \var IPAActiveState::cproc
+ * \brief State for the Color Processing algorithm
+ *
+ * \struct IPAActiveState::cproc.brightness
+ * \brief Brightness level
+ *
+ * \var IPAActiveState::cproc.contrast
+ * \brief Contrast level
+ *
+ * \var IPAActiveState::cproc.saturation
+ * \brief Saturation level
+ */
+
+/**
+ * \var IPAActiveState::dpf
+ * \brief State for the Denoise Pre-Filter algorithm
+ *
+ * \var IPAActiveState::dpf.denoise
+ * \brief Indicates if denoise is activated
+ */
+
+/**
+ * \var IPAActiveState::filter
+ * \brief State for the Filter algorithm
+ *
+ * \struct IPAActiveState::filter.denoise
+ * \brief Denoising level
+ *
+ * \var IPAActiveState::filter.sharpness
+ * \brief Sharpness level
+ */
+
+/**
+ * \struct IPAFrameContext
+ * \brief Per-frame context for algorithms
+ *
+ * The frame context stores two distinct categories of information:
+ *
+ * - The value of the controls to be applied to the frame. These values are
+ * typically set in the queueRequest() function, from the consolidated
+ * control values stored in the active state. The frame context thus stores
+ * values for all controls related to the algorithm, not limited to the
+ * controls specified in the corresponding request, but consolidated from all
+ * requests that have been queued so far.
+ *
+ * For controls that can be set manually or computed by an algorithm
+ * (depending on the algorithm operation mode), such as for instance the
+ * colour gains for the AWB algorithm, the control value will be stored in
+ * the frame context in the queueRequest() function only when operating in
+ * manual mode. When operating in auto mode, the values are computed by the
+ * algorithm in process(), stored in the active state, and copied to the
+ * frame context in prepare(), just before being stored in the ISP parameters
+ * buffer.
+ *
+ * The queueRequest() function can also store ancillary data in the frame
+ * context, such as flags to indicate if (and what) control values have
+ * changed compared to the previous request.
+ *
+ * - Status information computed by the algorithm for a frame. For instance,
+ * the colour temperature estimated by the AWB algorithm from ISP statistics
+ * calculated on a frame is stored in the frame context for that frame in
+ * the process() function.
+ */
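
In code, the flow described above for a control that supports both manual and automatic operation reduces to the following shape. This is a condensed sketch of the AWB colour gains handling shown earlier in this patch, with the frame parameters omitted, not an additional API:

    void queueRequest(IPAContext &context, IPAFrameContext &frameContext,
    		  const ControlList &controls)
    {
    	/* Consolidate the control value into the active state... */
    	const auto &gains = controls.get(controls::ColourGains);
    	if (gains)
    		context.activeState.awb.gains.manual.red = (*gains)[0];

    	/* ...and copy it to the frame context in manual mode only. */
    	if (!context.activeState.awb.autoEnabled)
    		frameContext.awb.gains.red =
    			context.activeState.awb.gains.manual.red;
    }

    void prepare(IPAContext &context, IPAFrameContext &frameContext)
    {
    	/* In auto mode, latch the latest value computed by process(). */
    	if (frameContext.awb.autoEnabled)
    		frameContext.awb.gains.red =
    			context.activeState.awb.gains.automatic.red;
    }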
+
+/**
* \var IPAFrameContext::agc
- * \brief Context for the Automatic Gain Control algorithm
+ * \brief Automatic Gain Control parameters for this frame
*
- * The exposure and gain determined are expected to be applied to the sensor
- * at the earliest opportunity.
+ * The exposure and gain are provided by the AGC algorithm, and are to be
+ * applied to the sensor in order to take effect for this frame.
*
* \var IPAFrameContext::agc.exposure
* \brief Exposure time expressed as a number of lines
@@ -118,7 +246,7 @@ namespace libcamera::ipa::rkisp1 {
/**
* \var IPAFrameContext::awb
- * \brief Context for the Automatic White Balance algorithm
+ * \brief Automatic White Balance parameters for this frame
*
* \struct IPAFrameContext::awb.gains
* \brief White balance gains
@@ -134,11 +262,14 @@ namespace libcamera::ipa::rkisp1 {
*
* \var IPAFrameContext::awb.temperatureK
* \brief Estimated color temperature
+ *
+ * \var IPAFrameContext::awb.autoEnabled
+ * \brief Whether the Auto White Balance algorithm is enabled
*/
/**
* \var IPAFrameContext::cproc
- * \brief Context for the Color Processing algorithm
+ * \brief Color Processing parameters for this frame
*
* \struct IPAFrameContext::cproc.brightness
* \brief Brightness level
@@ -149,13 +280,26 @@ namespace libcamera::ipa::rkisp1 {
* \var IPAFrameContext::cproc.saturation
* \brief Saturation level
*
- * \var IPAFrameContext::cproc.updateParams
- * \brief Indicates if ISP parameters need to be updated
+ * \var IPAFrameContext::cproc.update
+ * \brief Indicates if the color processing parameters have been updated
+ * compared to the previous frame
+ */
+
+/**
+ * \var IPAFrameContext::dpf
+ * \brief Denoise Pre-Filter parameters for this frame
+ *
+ * \var IPAFrameContext::dpf.denoise
+ * \brief Indicates if denoise is activated
+ *
+ * \var IPAFrameContext::dpf.update
+ * \brief Indicates if the denoise pre-filter parameters have been updated
+ * compared to the previous frame
*/
/**
* \var IPAFrameContext::filter
- * \brief Context for the Filter algorithm
+ * \brief Filter parameters for this frame
*
* \struct IPAFrameContext::filter.denoise
* \brief Denoising level
@@ -164,12 +308,13 @@ namespace libcamera::ipa::rkisp1 {
* \brief Sharpness level
*
- * \var IPAFrameContext::filter.updateParams
+ * \var IPAFrameContext::filter.update
- * \brief Indicates if ISP parameters need to be updated
+ * \brief Indicates if the filter parameters have been updated compared to the
+ * previous frame
*/
/**
* \var IPAFrameContext::sensor
- * \brief Effective sensor values
+ * \brief Sensor configuration that has been used for this frame
*
* \var IPAFrameContext::sensor.exposure
* \brief Exposure time expressed as a number of lines
@@ -179,12 +324,17 @@ namespace libcamera::ipa::rkisp1 {
*/
/**
- * \var IPAFrameContext::frameCount
- * \brief Counter of requests queued to the IPA module
+ * \struct IPAContext
+ * \brief Global IPA context data shared between all algorithms
+ *
+ * \var IPAContext::configuration
+ * \brief The IPA session configuration, immutable during the session
+ *
+ * \var IPAContext::activeState
+ * \brief The IPA active state, storing the latest state for all algorithms
*
- * The counter is reset to 0 when the IPA module is configured, and is
- * incremented for each request being queued, after calling the
- * Algorithm::prepare() function of all algorithms.
+ * \var IPAContext::frameContexts
+ * \brief Ring buffer of per-frame contexts
*/
} /* namespace libcamera::ipa::rkisp1 */
diff --git a/src/ipa/rkisp1/ipa_context.h b/src/ipa/rkisp1/ipa_context.h
index 2bdb6a81..c85d8abe 100644
--- a/src/ipa/rkisp1/ipa_context.h
+++ b/src/ipa/rkisp1/ipa_context.h
@@ -14,6 +14,8 @@
#include <libcamera/geometry.h>
+#include <libipa/fc_queue.h>
+
namespace libcamera {
namespace ipa::rkisp1 {
@@ -29,9 +31,14 @@ struct IPASessionConfiguration {
struct {
struct rkisp1_cif_isp_window measureWindow;
+ bool enabled;
} awb;
struct {
+ bool enabled;
+ } lsc;
+
+ struct {
utils::Duration lineDuration;
Size size;
} sensor;
@@ -41,7 +48,47 @@ struct IPASessionConfiguration {
} hw;
};
-struct IPAFrameContext {
+struct IPAActiveState {
+ struct {
+ uint32_t exposure;
+ double gain;
+ } agc;
+
+ struct {
+ struct {
+ struct {
+ double red;
+ double green;
+ double blue;
+ } manual;
+ struct {
+ double red;
+ double green;
+ double blue;
+ } automatic;
+ } gains;
+
+ unsigned int temperatureK;
+ bool autoEnabled;
+ } awb;
+
+ struct {
+ int8_t brightness;
+ uint8_t contrast;
+ uint8_t saturation;
+ } cproc;
+
+ struct {
+ bool denoise;
+ } dpf;
+
+ struct {
+ uint8_t denoise;
+ uint8_t sharpness;
+ } filter;
+};
+
+struct IPAFrameContext : public FrameContext {
struct {
uint32_t exposure;
double gain;
@@ -54,33 +101,39 @@ struct IPAFrameContext {
double blue;
} gains;
- double temperatureK;
+ unsigned int temperatureK;
+ bool autoEnabled;
} awb;
struct {
int8_t brightness;
uint8_t contrast;
uint8_t saturation;
- bool updateParams;
+ bool update;
} cproc;
struct {
+ bool denoise;
+ bool update;
+ } dpf;
+
+ struct {
uint8_t denoise;
uint8_t sharpness;
- bool updateParams;
+ bool update;
} filter;
struct {
uint32_t exposure;
double gain;
} sensor;
-
- unsigned int frameCount;
};
struct IPAContext {
IPASessionConfiguration configuration;
- IPAFrameContext frameContext;
+ IPAActiveState activeState;
+
+ FCQueue<IPAFrameContext> frameContexts;
};
} /* namespace ipa::rkisp1 */
diff --git a/src/ipa/rkisp1/rkisp1.cpp b/src/ipa/rkisp1/rkisp1.cpp
index 17d42d38..32feb168 100644
--- a/src/ipa/rkisp1/rkisp1.cpp
+++ b/src/ipa/rkisp1/rkisp1.cpp
@@ -40,13 +40,18 @@ using namespace std::literals::chrono_literals;
namespace ipa::rkisp1 {
+/* Maximum number of frame contexts to be held */
+static constexpr uint32_t kMaxFrameContexts = 16;
+
class IPARkISP1 : public IPARkISP1Interface, public Module
{
public:
+ IPARkISP1();
+
int init(const IPASettings &settings, unsigned int hwRevision,
ControlInfoMap *ipaControls) override;
int start() override;
- void stop() override {}
+ void stop() override;
int configure(const IPACameraSensorInfo &info,
const std::map<uint32_t, IPAStream> &streamConfig,
@@ -71,9 +76,6 @@ private:
ControlInfoMap ctrls_;
- /* Camera sensor controls. */
- bool autoExposure_;
-
/* revision-specific data */
rkisp1_cif_isp_version hwRevision_;
unsigned int hwHistBinNMax_;
@@ -92,6 +94,8 @@ namespace {
/* List of controls handled by the RkISP1 IPA */
const ControlInfoMap::Map rkisp1Controls{
{ &controls::AeEnable, ControlInfo(false, true) },
+ { &controls::AwbEnable, ControlInfo(false, true) },
+ { &controls::ColourGains, ControlInfo(0.0f, 3.996f, 1.0f) },
{ &controls::Brightness, ControlInfo(-1.0f, 0.993f) },
{ &controls::Contrast, ControlInfo(0.0f, 1.993f) },
{ &controls::Saturation, ControlInfo(0.0f, 1.993f) },
@@ -101,6 +105,11 @@ const ControlInfoMap::Map rkisp1Controls{
} /* namespace */
+IPARkISP1::IPARkISP1()
+ : context_({ {}, {}, { kMaxFrameContexts } })
+{
+}
+
std::string IPARkISP1::logPrefix() const
{
return "rkisp1";
@@ -142,7 +151,7 @@ int IPARkISP1::init(const IPASettings &settings, unsigned int hwRevision,
}
/* Load the tuning data file. */
- File file(settings.configurationFile.c_str());
+ File file(settings.configurationFile);
if (!file.open(File::OpenModeFlag::ReadOnly)) {
int ret = file.error();
LOG(IPARkISP1, Error)
@@ -186,6 +195,11 @@ int IPARkISP1::start()
return 0;
}
+void IPARkISP1::stop()
+{
+ context_.frameContexts.clear();
+}
+
/**
* \todo The RkISP1 pipeline currently provides an empty IPACameraSensorInfo
* if the connected sensor does not provide enough information to properly
@@ -213,8 +227,6 @@ int IPARkISP1::configure([[maybe_unused]] const IPACameraSensorInfo &info,
return -EINVAL;
}
- autoExposure_ = true;
-
int32_t minExposure = itExp->second.min().get<int32_t>();
int32_t maxExposure = itExp->second.max().get<int32_t>();
@@ -225,8 +237,10 @@ int IPARkISP1::configure([[maybe_unused]] const IPACameraSensorInfo &info,
<< "Exposure: " << minExposure << "-" << maxExposure
<< " Gain: " << minGain << "-" << maxGain;
- /* Clean context at configuration */
- context_ = {};
+ /* Clear the IPA context before the streaming session. */
+ context_.configuration = {};
+ context_.activeState = {};
+ context_.frameContexts.clear();
/* Set the hardware revision for the algorithms. */
context_.configuration.hw.revision = hwRevision_;
@@ -246,8 +260,6 @@ int IPARkISP1::configure([[maybe_unused]] const IPACameraSensorInfo &info,
context_.configuration.agc.minAnalogueGain = camHelper_->gain(minGain);
context_.configuration.agc.maxAnalogueGain = camHelper_->gain(maxGain);
- context_.frameContext.frameCount = 0;
-
for (auto const &algo : algorithms()) {
int ret = algo->configure(context_, info);
if (ret)
@@ -289,12 +301,16 @@ void IPARkISP1::unmapBuffers(const std::vector<unsigned int> &ids)
void IPARkISP1::queueRequest(const uint32_t frame, const ControlList &controls)
{
+ IPAFrameContext &frameContext = context_.frameContexts.alloc(frame);
+
for (auto const &algo : algorithms())
- algo->queueRequest(context_, frame, controls);
+ algo->queueRequest(context_, frame, frameContext, controls);
}
void IPARkISP1::fillParamsBuffer(const uint32_t frame, const uint32_t bufferId)
{
+ IPAFrameContext &frameContext = context_.frameContexts.get(frame);
+
rkisp1_params_cfg *params =
reinterpret_cast<rkisp1_params_cfg *>(
mappedBuffers_.at(bufferId).planes()[0].data());
@@ -303,28 +319,29 @@ void IPARkISP1::fillParamsBuffer(const uint32_t frame, const uint32_t bufferId)
memset(params, 0, sizeof(*params));
for (auto const &algo : algorithms())
- algo->prepare(context_, params);
+ algo->prepare(context_, frame, frameContext, params);
paramsBufferReady.emit(frame);
- context_.frameContext.frameCount++;
}
void IPARkISP1::processStatsBuffer(const uint32_t frame, const uint32_t bufferId,
const ControlList &sensorControls)
{
+ IPAFrameContext &frameContext = context_.frameContexts.get(frame);
+
const rkisp1_stat_buffer *stats =
reinterpret_cast<rkisp1_stat_buffer *>(
mappedBuffers_.at(bufferId).planes()[0].data());
- context_.frameContext.sensor.exposure =
+ frameContext.sensor.exposure =
sensorControls.get(V4L2_CID_EXPOSURE).get<int32_t>();
- context_.frameContext.sensor.gain =
+ frameContext.sensor.gain =
camHelper_->gain(sensorControls.get(V4L2_CID_ANALOGUE_GAIN).get<int32_t>());
unsigned int aeState = 0;
for (auto const &algo : algorithms())
- algo->process(context_, nullptr, stats);
+ algo->process(context_, frame, frameContext, stats);
setControls(frame);
@@ -333,8 +350,14 @@ void IPARkISP1::processStatsBuffer(const uint32_t frame, const uint32_t bufferId
void IPARkISP1::setControls(unsigned int frame)
{
- uint32_t exposure = context_.frameContext.agc.exposure;
- uint32_t gain = camHelper_->gainCode(context_.frameContext.agc.gain);
+	/*
+	 * \todo The frame number is most likely wrong here; we need to take
+	 * internal sensor delays and other timing parameters into account.
+	 */
+
+ IPAFrameContext &frameContext = context_.frameContexts.get(frame);
+ uint32_t exposure = frameContext.agc.exposure;
+ uint32_t gain = camHelper_->gainCode(frameContext.agc.gain);
ControlList ctrls(ctrls_);
ctrls.set(V4L2_CID_EXPOSURE, static_cast<int32_t>(exposure));
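[Editorial note] The key change in this series is replacing the single shared frame context with a queue of per-frame contexts, so the parameters and statistics for frame N are always matched against the state captured when frame N was queued. A minimal sketch of the pattern, assuming a fixed-size ring indexed by frame number; ContextRing and MyFrameContext are illustrative names, not the actual libipa FCQueue implementation:

#include <cstdint>
#include <vector>

template<typename Context>
class ContextRing
{
public:
	ContextRing(unsigned int size) : contexts_(size) {}

	/* Called at queueRequest() time to start a fresh context. */
	Context &alloc(uint32_t frame)
	{
		Context &ctx = contexts_[frame % contexts_.size()];
		ctx = {};	/* Drop state left over from an older frame. */
		ctx.frame = frame;
		return ctx;
	}

	/* Called when filling parameters or processing statistics. */
	Context &get(uint32_t frame)
	{
		return contexts_[frame % contexts_.size()];
	}

	/* Called from stop() and configure() to discard stale contexts. */
	void clear()
	{
		for (Context &ctx : contexts_)
			ctx = {};
	}

private:
	std::vector<Context> contexts_;
};

struct MyFrameContext {
	uint32_t frame = 0;
	uint32_t exposure = 0;
};

int main()
{
	ContextRing<MyFrameContext> queue(16);
	queue.alloc(0).exposure = 1000;
	return queue.get(0).exposure == 1000 ? 0 : 1;
}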
diff --git a/src/libcamera/base/meson.build b/src/libcamera/base/meson.build
index 7030ad1f..3b9d74ef 100644
--- a/src/libcamera/base/meson.build
+++ b/src/libcamera/base/meson.build
@@ -22,8 +22,8 @@ libcamera_base_sources = files([
'utils.cpp',
])
-libdw = cc.find_library('libdw', required : false)
-libunwind = cc.find_library('libunwind', required : false)
+libdw = dependency('libdw', required : false)
+libunwind = dependency('libunwind', required : false)
if cc.has_header_symbol('execinfo.h', 'backtrace')
config_h.set('HAVE_BACKTRACE', 1)
diff --git a/src/libcamera/camera.cpp b/src/libcamera/camera.cpp
index 3910915c..9fe29ca9 100644
--- a/src/libcamera/camera.cpp
+++ b/src/libcamera/camera.cpp
@@ -317,17 +317,6 @@ std::size_t CameraConfiguration::size() const
return config_.size();
}
-namespace {
-
-bool isRaw(const PixelFormat &pixFmt)
-{
- const PixelFormatInfo &info = PixelFormatInfo::info(pixFmt);
- return info.isValid() &&
- info.colourEncoding == PixelFormatInfo::ColourEncodingRAW;
-}
-
-} /* namespace */
-
/**
* \enum CameraConfiguration::ColorSpaceFlag
* \brief Specify the behaviour of validateColorSpaces
@@ -368,29 +357,31 @@ CameraConfiguration::Status CameraConfiguration::validateColorSpaces(ColorSpaceF
Status status = Valid;
/*
- * Set all raw streams to the Raw color space, and make a note of the largest
- * non-raw stream with a defined color space (if there is one).
+ * Set all raw streams to the Raw color space, and make a note of the
+ * largest non-raw stream with a defined color space (if there is one).
*/
- int index = -1;
+	std::optional<ColorSpace> colorSpace;
+	Size size;
+
for (auto [i, cfg] : utils::enumerate(config_)) {
- if (isRaw(cfg.pixelFormat)) {
- if (cfg.colorSpace != ColorSpace::Raw) {
- cfg.colorSpace = ColorSpace::Raw;
- status = Adjusted;
- }
- } else if (cfg.colorSpace && (index == -1 || cfg.size > config_[i].size)) {
- index = i;
- }
+ if (!cfg.colorSpace)
+ continue;
+
+ if (cfg.colorSpace->adjust(cfg.pixelFormat))
+ status = Adjusted;
+
+		if (cfg.colorSpace != ColorSpace::Raw &&
+		    (!colorSpace || cfg.size > size)) {
+			colorSpace = cfg.colorSpace;
+			size = cfg.size;
+		}
}
- if (index < 0 || !(flags & ColorSpaceFlag::StreamsShareColorSpace))
+ if (!colorSpace || !(flags & ColorSpaceFlag::StreamsShareColorSpace))
return status;
/* Make all output color spaces the same, if requested. */
for (auto &cfg : config_) {
- if (!isRaw(cfg.pixelFormat) &&
- cfg.colorSpace != config_[index].colorSpace) {
- cfg.colorSpace = config_[index].colorSpace;
+ if (cfg.colorSpace != ColorSpace::Raw &&
+ cfg.colorSpace != colorSpace) {
+ cfg.colorSpace = colorSpace;
status = Adjusted;
}
}
@@ -508,6 +499,11 @@ static const char *const camera_state_names[] = {
"Running",
};
+bool Camera::Private::isAcquired() const
+{
+	return state_.load(std::memory_order_acquire) != CameraAvailable;
+}
+
bool Camera::Private::isRunning() const
{
return state_.load(std::memory_order_acquire) == CameraRunning;
@@ -811,7 +807,7 @@ int Camera::exportFrameBuffers(Stream *stream,
* not blocking, if the device has already been acquired (by the same or another
* process) the -EBUSY error code is returned.
*
- * Acquiring a camera will limit usage of any other camera(s) provided by the
+ * Acquiring a camera may limit usage of any other camera(s) provided by the
* same pipeline handler to the same instance of libcamera. The limit is in
* effect until all cameras from the pipeline handler are released. Other
* instances of libcamera can still list and examine the cameras but will fail
@@ -839,7 +835,7 @@ int Camera::acquire()
if (ret < 0)
return ret == -EACCES ? -EBUSY : ret;
- if (!d->pipe_->lock()) {
+ if (!d->pipe_->acquire()) {
LOG(Camera, Info)
<< "Pipeline handler in use by another process";
return -EBUSY;
@@ -873,7 +869,8 @@ int Camera::release()
if (ret < 0)
return ret == -EACCES ? -EBUSY : ret;
- d->pipe_->unlock();
+ if (d->isAcquired())
+ d->pipe_->release();
d->setState(Private::CameraAvailable);
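[Editorial note] For reference, the application-facing side of this rename is unchanged: acquire() and release() on the Camera remain the entry points, and only the pipeline-handler locking behind them moves to a use count. A minimal usage sketch against the public API:

#include <memory>

#include <libcamera/camera.h>
#include <libcamera/camera_manager.h>

int main()
{
	libcamera::CameraManager cm;
	cm.start();

	if (cm.cameras().empty())
		return 0;

	std::shared_ptr<libcamera::Camera> camera = cm.cameras()[0];

	/* Fails with -EBUSY if another process holds the pipeline handler. */
	if (camera->acquire())
		return -1;

	/* ... configure the camera, start it, queue requests ... */

	camera->release();
	cm.stop();
	return 0;
}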
diff --git a/src/libcamera/camera_manager.cpp b/src/libcamera/camera_manager.cpp
index 70d73822..7ff4bc6f 100644
--- a/src/libcamera/camera_manager.cpp
+++ b/src/libcamera/camera_manager.cpp
@@ -189,7 +189,7 @@ void CameraManager::Private::addCamera(std::shared_ptr<Camera> camera,
{
MutexLocker locker(mutex_);
- for (std::shared_ptr<Camera> c : cameras_) {
+ for (const std::shared_ptr<Camera> &c : cameras_) {
if (c->id() == camera->id()) {
LOG(Camera, Fatal)
<< "Trying to register a camera with a duplicated ID '"
diff --git a/src/libcamera/camera_sensor.cpp b/src/libcamera/camera_sensor.cpp
index d055c16a..911fd0be 100644
--- a/src/libcamera/camera_sensor.cpp
+++ b/src/libcamera/camera_sensor.cpp
@@ -467,8 +467,8 @@ int CameraSensor::discoverAncillaryDevices()
ret = focusLens_->init();
if (ret) {
LOG(CameraSensor, Error)
- << "CameraLens initialisation failed";
- return ret;
+ << "Lens initialisation failed, lens disabled";
+ focusLens_.reset();
}
break;
diff --git a/src/libcamera/color_space.cpp b/src/libcamera/color_space.cpp
index caf39760..7356bf7d 100644
--- a/src/libcamera/color_space.cpp
+++ b/src/libcamera/color_space.cpp
@@ -12,6 +12,11 @@
#include <map>
#include <sstream>
#include <utility>
+#include <vector>
+
+#include <libcamera/base/utils.h>
+
+#include "libcamera/internal/formats.h"
/**
* \file color_space.h
@@ -29,21 +34,33 @@ namespace libcamera {
* (sometimes also referred to as the quantisation) of the color space.
*
* Certain combinations of these fields form well-known standard color
- * spaces such as "JPEG" or "REC709".
+ * spaces such as "sRGB" or "Rec709".
*
* In the strictest sense a "color space" formally only refers to the
* color primaries and white point. Here, however, the ColorSpace class
* adopts the common broader usage that includes the transfer function,
* Y'CbCr encoding method and quantisation.
*
- * For more information on the specific color spaces described here, please
- * see:
+ * More information on color spaces is available in the V4L2 documentation; see
+ * in particular
*
* - <a href="https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/colorspaces-details.html#col-srgb">sRGB</a>
* - <a href="https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/colorspaces-details.html#col-jpeg">JPEG</a>
* - <a href="https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/colorspaces-details.html#col-smpte-170m">SMPTE 170M</a>
* - <a href="https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/colorspaces-details.html#col-rec709">Rec.709</a>
* - <a href="https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/colorspaces-details.html#col-bt2020">Rec.2020</a>
+ *
+ * Note that there is no guarantee of a 1:1 mapping between color space names
+ * and definitions in libcamera and V4L2. Two notable differences are
+ *
+ * - The sRGB libcamera color space is defined for RGB formats only with no
+ * Y'CbCr encoding and a full quantization range, while the V4L2 SRGB color
+ * space has a Y'CbCr encoding and a limited quantization range.
+ * - The sYCC libcamera color space is called JPEG in V4L2 for historical
+ *   reasons.
+ *
+ * \todo Define the color space fully in the libcamera API to avoid referencing
+ * V4L2
*/
/**
@@ -118,11 +135,130 @@ namespace libcamera {
*/
/**
+ * \brief A constant representing a raw color space (from a sensor)
+ */
+const ColorSpace ColorSpace::Raw = {
+ Primaries::Raw,
+ TransferFunction::Linear,
+ YcbcrEncoding::None,
+ Range::Full
+};
+
+/**
+ * \brief A constant representing the sRGB color space (RGB formats only)
+ */
+const ColorSpace ColorSpace::Srgb = {
+ Primaries::Rec709,
+ TransferFunction::Srgb,
+ YcbcrEncoding::None,
+ Range::Full
+};
+
+/**
+ * \brief A constant representing the sYCC color space, typically used for
+ * encoding JPEG images
+ */
+const ColorSpace ColorSpace::Sycc = {
+ Primaries::Rec709,
+ TransferFunction::Srgb,
+ YcbcrEncoding::Rec601,
+ Range::Full
+};
+
+/**
+ * \brief A constant representing the SMPTE170M color space
+ */
+const ColorSpace ColorSpace::Smpte170m = {
+ Primaries::Smpte170m,
+ TransferFunction::Rec709,
+ YcbcrEncoding::Rec601,
+ Range::Limited
+};
+
+/**
+ * \brief A constant representing the Rec.709 color space
+ */
+const ColorSpace ColorSpace::Rec709 = {
+ Primaries::Rec709,
+ TransferFunction::Rec709,
+ YcbcrEncoding::Rec709,
+ Range::Limited
+};
+
+/**
+ * \brief A constant representing the Rec.2020 color space
+ */
+const ColorSpace ColorSpace::Rec2020 = {
+ Primaries::Rec2020,
+ TransferFunction::Rec709,
+ YcbcrEncoding::Rec2020,
+ Range::Limited
+};
+
+/**
+ * \var ColorSpace::primaries
+ * \brief The color primaries of this color space
+ */
+
+/**
+ * \var ColorSpace::transferFunction
+ * \brief The transfer function used by this color space
+ */
+
+/**
+ * \var ColorSpace::ycbcrEncoding
+ * \brief The Y'CbCr encoding used by this color space
+ */
+
+/**
+ * \var ColorSpace::range
+ * \brief The pixel range used by this color space
+ */
+
+namespace {
+
+const std::array<std::pair<ColorSpace, const char *>, 6> colorSpaceNames = { {
+ { ColorSpace::Raw, "RAW" },
+ { ColorSpace::Srgb, "sRGB" },
+ { ColorSpace::Sycc, "sYCC" },
+ { ColorSpace::Smpte170m, "SMPTE170M" },
+ { ColorSpace::Rec709, "Rec709" },
+ { ColorSpace::Rec2020, "Rec2020" },
+} };
+
+const std::map<ColorSpace::Primaries, std::string> primariesNames = {
+ { ColorSpace::Primaries::Raw, "RAW" },
+ { ColorSpace::Primaries::Smpte170m, "SMPTE170M" },
+ { ColorSpace::Primaries::Rec709, "Rec709" },
+ { ColorSpace::Primaries::Rec2020, "Rec2020" },
+};
+
+const std::map<ColorSpace::TransferFunction, std::string> transferNames = {
+ { ColorSpace::TransferFunction::Linear, "Linear" },
+ { ColorSpace::TransferFunction::Srgb, "sRGB" },
+ { ColorSpace::TransferFunction::Rec709, "Rec709" },
+};
+
+const std::map<ColorSpace::YcbcrEncoding, std::string> encodingNames = {
+ { ColorSpace::YcbcrEncoding::None, "None" },
+ { ColorSpace::YcbcrEncoding::Rec601, "Rec601" },
+ { ColorSpace::YcbcrEncoding::Rec709, "Rec709" },
+ { ColorSpace::YcbcrEncoding::Rec2020, "Rec2020" },
+};
+
+const std::map<ColorSpace::Range, std::string> rangeNames = {
+ { ColorSpace::Range::Full, "Full" },
+ { ColorSpace::Range::Limited, "Limited" },
+};
+
+} /* namespace */
+
+/**
* \brief Assemble and return a readable string representation of the
* ColorSpace
*
- * If the color space matches a standard ColorSpace (such as ColorSpace::Jpeg)
- * then the short name of the color space ("JPEG") is returned. Otherwise
+ * If the color space matches a standard ColorSpace (such as ColorSpace::Sycc)
+ * then the short name of the color space ("sYCC") is returned. Otherwise
* the four constituent parts of the ColorSpace are assembled into a longer
* string.
*
@@ -132,14 +268,6 @@ std::string ColorSpace::toString() const
{
/* Print out a brief name only for standard color spaces. */
- static const std::array<std::pair<ColorSpace, const char *>, 6> colorSpaceNames = { {
- { ColorSpace::Raw, "RAW" },
- { ColorSpace::Jpeg, "JPEG" },
- { ColorSpace::Srgb, "sRGB" },
- { ColorSpace::Smpte170m, "SMPTE170M" },
- { ColorSpace::Rec709, "Rec709" },
- { ColorSpace::Rec2020, "Rec2020" },
- } };
auto it = std::find_if(colorSpaceNames.begin(), colorSpaceNames.end(),
[this](const auto &item) {
return *this == item.first;
@@ -149,28 +277,6 @@ std::string ColorSpace::toString() const
/* Assemble a name made of the constituent fields. */
- static const std::map<Primaries, std::string> primariesNames = {
- { Primaries::Raw, "RAW" },
- { Primaries::Smpte170m, "SMPTE170M" },
- { Primaries::Rec709, "Rec709" },
- { Primaries::Rec2020, "Rec2020" },
- };
- static const std::map<TransferFunction, std::string> transferNames = {
- { TransferFunction::Linear, "Linear" },
- { TransferFunction::Srgb, "sRGB" },
- { TransferFunction::Rec709, "Rec709" },
- };
- static const std::map<YcbcrEncoding, std::string> encodingNames = {
- { YcbcrEncoding::None, "None" },
- { YcbcrEncoding::Rec601, "Rec601" },
- { YcbcrEncoding::Rec709, "Rec709" },
- { YcbcrEncoding::Rec2020, "Rec2020" },
- };
- static const std::map<Range, std::string> rangeNames = {
- { Range::Full, "Full" },
- { Range::Limited, "Limited" },
- };
-
auto itPrimaries = primariesNames.find(primaries);
std::string primariesName =
itPrimaries == primariesNames.end() ? "Invalid" : itPrimaries->second;
@@ -213,88 +319,185 @@ std::string ColorSpace::toString(const std::optional<ColorSpace> &colorSpace)
}
/**
- * \var ColorSpace::primaries
- * \brief The color primaries of this color space
- */
-
-/**
- * \var ColorSpace::transferFunction
- * \brief The transfer function used by this color space
- */
-
-/**
- * \var ColorSpace::ycbcrEncoding
- * \brief The Y'CbCr encoding used by this color space
- */
-
-/**
- * \var ColorSpace::range
- * \brief The pixel range used with by color space
- */
-
-/**
- * \brief A constant representing a raw color space (from a sensor)
- */
-const ColorSpace ColorSpace::Raw = {
- Primaries::Raw,
- TransferFunction::Linear,
- YcbcrEncoding::None,
- Range::Full
-};
-
-/**
- * \brief A constant representing the JPEG color space used for
- * encoding JPEG images
- */
-const ColorSpace ColorSpace::Jpeg = {
- Primaries::Rec709,
- TransferFunction::Srgb,
- YcbcrEncoding::Rec601,
- Range::Full
-};
-
-/**
- * \brief A constant representing the sRGB color space
+ * \brief Construct a color space from a string
+ * \param[in] str The string
*
- * This is identical to the JPEG color space except that the Y'CbCr
- * range is limited rather than full.
- */
-const ColorSpace ColorSpace::Srgb = {
- Primaries::Rec709,
- TransferFunction::Srgb,
- YcbcrEncoding::Rec601,
- Range::Limited
-};
-
-/**
- * \brief A constant representing the SMPTE170M color space
- */
-const ColorSpace ColorSpace::Smpte170m = {
- Primaries::Smpte170m,
- TransferFunction::Rec709,
- YcbcrEncoding::Rec601,
- Range::Limited
-};
-
-/**
- * \brief A constant representing the Rec.709 color space
+ * The string \a str can contain the name of a well-known color space, or be
+ * made of the four color space components separated by a '/' character, ordered
+ * as
+ *
+ * \verbatim primaries '/' transferFunction '/' ycbcrEncoding '/' range \endverbatim
+ *
+ * Any failure to parse the string, either because it doesn't match the expected
+ * format, or because one of the names isn't recognized, will cause this
+ * function to return std::nullopt.
+ *
+ * \return The ColorSpace corresponding to the string, or std::nullopt if the
+ * string doesn't describe a known color space
*/
-const ColorSpace ColorSpace::Rec709 = {
- Primaries::Rec709,
- TransferFunction::Rec709,
- YcbcrEncoding::Rec709,
- Range::Limited
-};
+std::optional<ColorSpace> ColorSpace::fromString(const std::string &str)
+{
+ /* First search for a standard color space name match. */
+ auto itColorSpace = std::find_if(colorSpaceNames.begin(), colorSpaceNames.end(),
+ [&str](const auto &item) {
+ return str == item.second;
+ });
+ if (itColorSpace != colorSpaceNames.end())
+ return itColorSpace->first;
+
+ /*
+ * If not found, the string must contain the four color space
+ * components separated by a '/' character.
+ */
+ const auto &split = utils::split(str, "/");
+ std::vector<std::string> components{ split.begin(), split.end() };
+
+ if (components.size() != 4)
+ return std::nullopt;
+
+ ColorSpace colorSpace = ColorSpace::Raw;
+
+ /* Color primaries */
+ auto itPrimaries = std::find_if(primariesNames.begin(), primariesNames.end(),
+ [&components](const auto &item) {
+ return components[0] == item.second;
+ });
+ if (itPrimaries == primariesNames.end())
+ return std::nullopt;
+
+ colorSpace.primaries = itPrimaries->first;
+
+ /* Transfer function */
+ auto itTransfer = std::find_if(transferNames.begin(), transferNames.end(),
+ [&components](const auto &item) {
+ return components[1] == item.second;
+ });
+ if (itTransfer == transferNames.end())
+ return std::nullopt;
+
+ colorSpace.transferFunction = itTransfer->first;
+
+ /* YCbCr encoding */
+ auto itEncoding = std::find_if(encodingNames.begin(), encodingNames.end(),
+ [&components](const auto &item) {
+ return components[2] == item.second;
+ });
+ if (itEncoding == encodingNames.end())
+ return std::nullopt;
+
+ colorSpace.ycbcrEncoding = itEncoding->first;
+
+ /* Quantization range */
+ auto itRange = std::find_if(rangeNames.begin(), rangeNames.end(),
+ [&components](const auto &item) {
+ return components[3] == item.second;
+ });
+ if (itRange == rangeNames.end())
+ return std::nullopt;
+
+ colorSpace.range = itRange->first;
+
+ return colorSpace;
+}
/**
- * \brief A constant representing the Rec.2020 color space
+ * \brief Adjust the color space to match a pixel format
+ * \param[in] format The pixel format
+ *
+ * Not all combinations of pixel formats and color spaces make sense. For
+ * instance, nobody uses a limited quantization range with raw Bayer formats,
+ * and the YcbcrEncoding::None encoding isn't valid for YUV formats. This
+ * function adjusts the ColorSpace to make it compatible with the given \a
+ * format, by applying the following rules:
+ *
+ * - The color space for RAW formats must be Raw.
+ * - The Y'CbCr encoding and quantization range for RGB formats must be
+ * YcbcrEncoding::None and Range::Full respectively.
+ * - The Y'CbCr encoding for YUV formats must not be YcbcrEncoding::None. In
+ *   that case, the best encoding is guessed based on the primaries and
+ *   transfer function.
+ *
+ * \return True if the color space has been adjusted, or false if it was
+ * already compatible with the format and hasn't been changed
*/
-const ColorSpace ColorSpace::Rec2020 = {
- Primaries::Rec2020,
- TransferFunction::Rec709,
- YcbcrEncoding::Rec2020,
- Range::Limited
-};
+bool ColorSpace::adjust(PixelFormat format)
+{
+ const PixelFormatInfo &info = PixelFormatInfo::info(format);
+ bool adjusted = false;
+
+ switch (info.colourEncoding) {
+ case PixelFormatInfo::ColourEncodingRAW:
+ /* Raw formats must use the raw color space. */
+ if (*this != ColorSpace::Raw) {
+ *this = ColorSpace::Raw;
+ adjusted = true;
+ }
+ break;
+
+ case PixelFormatInfo::ColourEncodingRGB:
+ /*
+ * RGB formats can't have a Y'CbCr encoding, and must use full
+ * range quantization.
+ */
+ if (ycbcrEncoding != YcbcrEncoding::None) {
+ ycbcrEncoding = YcbcrEncoding::None;
+ adjusted = true;
+ }
+
+ if (range != Range::Full) {
+ range = Range::Full;
+ adjusted = true;
+ }
+ break;
+
+ case PixelFormatInfo::ColourEncodingYUV:
+ if (ycbcrEncoding != YcbcrEncoding::None)
+ break;
+
+ /*
+ * YUV formats must have a Y'CbCr encoding. Infer the most
+ * probable option from the transfer function and primaries.
+ */
+ switch (transferFunction) {
+ case TransferFunction::Linear:
+ /*
+			 * Linear YUV is not used in any standard color space;
+			 * pick the widely supported Rec601 as the default.
+ */
+ ycbcrEncoding = YcbcrEncoding::Rec601;
+ break;
+
+ case TransferFunction::Rec709:
+ switch (primaries) {
+ /* Raw should never happen. */
+ case Primaries::Raw:
+ case Primaries::Smpte170m:
+ ycbcrEncoding = YcbcrEncoding::Rec601;
+ break;
+ case Primaries::Rec709:
+ ycbcrEncoding = YcbcrEncoding::Rec709;
+ break;
+ case Primaries::Rec2020:
+ ycbcrEncoding = YcbcrEncoding::Rec2020;
+ break;
+ }
+ break;
+
+ case TransferFunction::Srgb:
+ /*
+			 * Only the sYCC color space uses the sRGB transfer
+			 * function; the corresponding encoding is Rec601.
+ */
+ ycbcrEncoding = YcbcrEncoding::Rec601;
+ break;
+ }
+
+ adjusted = true;
+ break;
+ }
+
+ return adjusted;
+}
/**
* \brief Compare color spaces for equality
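[Editorial note] A short usage sketch of the two entry points added above, fromString() and adjust(); the printed names follow the tables introduced in this patch:

#include <iostream>
#include <optional>

#include <libcamera/color_space.h>
#include <libcamera/formats.h>

int main()
{
	using libcamera::ColorSpace;

	/* The four-component form parses to the matching well-known name. */
	std::optional<ColorSpace> cs =
		ColorSpace::fromString("Rec709/sRGB/Rec601/Full");
	std::cout << ColorSpace::toString(cs) << std::endl;	/* "sYCC" */

	/* Unknown names fail cleanly. */
	if (!ColorSpace::fromString("NotAColorSpace"))
		std::cout << "parse failure" << std::endl;

	/*
	 * adjust() drops the Y'CbCr encoding for an RGB format, turning
	 * sYCC into sRGB.
	 */
	cs->adjust(libcamera::formats::RGB888);
	std::cout << cs->toString() << std::endl;	/* "sRGB" */

	return 0;
}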
diff --git a/src/libcamera/control_serializer.cpp b/src/libcamera/control_serializer.cpp
index e87d2362..0cf719bd 100644
--- a/src/libcamera/control_serializer.cpp
+++ b/src/libcamera/control_serializer.cpp
@@ -144,7 +144,7 @@ void ControlSerializer::reset()
size_t ControlSerializer::binarySize(const ControlValue &value)
{
- return value.data().size_bytes();
+ return sizeof(ControlType) + value.data().size_bytes();
}
size_t ControlSerializer::binarySize(const ControlInfo &info)
@@ -195,6 +195,8 @@ size_t ControlSerializer::binarySize(const ControlList &list)
void ControlSerializer::store(const ControlValue &value,
ByteStreamBuffer &buffer)
{
+ const ControlType type = value.type();
+ buffer.write(&type);
buffer.write(value.data());
}
@@ -379,11 +381,13 @@ int ControlSerializer::serialize(const ControlList &list,
return 0;
}
-ControlValue ControlSerializer::loadControlValue(ControlType type,
- ByteStreamBuffer &buffer,
+ControlValue ControlSerializer::loadControlValue(ByteStreamBuffer &buffer,
bool isArray,
unsigned int count)
{
+ ControlType type;
+ buffer.read(&type);
+
ControlValue value;
value.reserve(type, isArray, count);
@@ -392,15 +396,11 @@ ControlValue ControlSerializer::loadControlValue(ControlType type,
return value;
}
-ControlInfo ControlSerializer::loadControlInfo(ControlType type,
- ByteStreamBuffer &b)
+ControlInfo ControlSerializer::loadControlInfo(ByteStreamBuffer &b)
{
- if (type == ControlTypeString)
- type = ControlTypeInteger32;
-
- ControlValue min = loadControlValue(type, b);
- ControlValue max = loadControlValue(type, b);
- ControlValue def = loadControlValue(type, b);
+ ControlValue min = loadControlValue(b);
+ ControlValue max = loadControlValue(b);
+ ControlValue def = loadControlValue(b);
return ControlInfo(min, max, def);
}
@@ -513,7 +513,7 @@ ControlInfoMap ControlSerializer::deserialize<ControlInfoMap>(ByteStreamBuffer &
}
/* Create and store the ControlInfo. */
- ctrls.emplace(controlId, loadControlInfo(type, values));
+ ctrls.emplace(controlId, loadControlInfo(values));
}
/*
@@ -624,10 +624,8 @@ ControlList ControlSerializer::deserialize<ControlList>(ByteStreamBuffer &buffer
return {};
}
- ControlType type = static_cast<ControlType>(entry->type);
ctrls.set(entry->id,
- loadControlValue(type, values, entry->is_array,
- entry->count));
+ loadControlValue(values, entry->is_array, entry->count));
}
return ctrls;
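[Editorial note] The serializer change above prefixes every serialized value with its ControlType, so the receiving side no longer needs a ControlInfoMap to interpret the payload. A simplified sketch of the tagged layout; Type, storeValue() and loadType() are illustrative names, not the libcamera API:

#include <cstddef>
#include <cstdint>
#include <cstring>
#include <vector>

enum class Type : uint32_t { None = 0, Int32 = 1, Float = 2 };

/* Write the type tag, then the raw payload. */
static void storeValue(std::vector<uint8_t> &buf, Type type,
		       const void *data, std::size_t size)
{
	const uint8_t *tag = reinterpret_cast<const uint8_t *>(&type);
	buf.insert(buf.end(), tag, tag + sizeof(type));

	const uint8_t *payload = static_cast<const uint8_t *>(data);
	buf.insert(buf.end(), payload, payload + size);
}

/* Read the tag back; the payload interpretation follows from it. */
static Type loadType(const std::vector<uint8_t> &buf, std::size_t &offset)
{
	Type type;
	std::memcpy(&type, buf.data() + offset, sizeof(type));
	offset += sizeof(type);
	return type;
}

int main()
{
	std::vector<uint8_t> buf;
	int32_t exposure = 1000;

	storeValue(buf, Type::Int32, &exposure, sizeof(exposure));

	std::size_t offset = 0;
	return loadType(buf, offset) == Type::Int32 ? 0 : 1;
}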
diff --git a/src/libcamera/device_enumerator.cpp b/src/libcamera/device_enumerator.cpp
index d1258050..f2e055de 100644
--- a/src/libcamera/device_enumerator.cpp
+++ b/src/libcamera/device_enumerator.cpp
@@ -161,7 +161,7 @@ std::unique_ptr<DeviceEnumerator> DeviceEnumerator::create()
DeviceEnumerator::~DeviceEnumerator()
{
- for (std::shared_ptr<MediaDevice> media : devices_) {
+ for (const std::shared_ptr<MediaDevice> &media : devices_) {
if (media->busy())
LOG(DeviceEnumerator, Error)
<< "Removing media device " << media->deviceNode()
diff --git a/src/libcamera/ipa_manager.cpp b/src/libcamera/ipa_manager.cpp
index 2f96a207..030ef43f 100644
--- a/src/libcamera/ipa_manager.cpp
+++ b/src/libcamera/ipa_manager.cpp
@@ -109,8 +109,10 @@ IPAManager::IPAManager()
LOG(IPAManager, Fatal)
<< "Multiple IPAManager objects are not allowed";
+#if HAVE_IPA_PUBKEY
if (!pubKey_.isValid())
LOG(IPAManager, Warning) << "Public key not valid";
+#endif
unsigned int ipaCount = 0;
diff --git a/src/libcamera/media_device.cpp b/src/libcamera/media_device.cpp
index 7c94da9e..52c8e66e 100644
--- a/src/libcamera/media_device.cpp
+++ b/src/libcamera/media_device.cpp
@@ -352,8 +352,9 @@ MediaEntity *MediaDevice::getEntityByName(const std::string &name) const
* entity with name \a sourceName, to the pad at index \a sinkIdx of the
* sink entity with name \a sinkName, if any.
*
- * \sa MediaDevice::link(const MediaEntity *source, unsigned int sourceIdx, const MediaEntity *sink, unsigned int sinkIdx) const
- * \sa MediaDevice::link(const MediaPad *source, const MediaPad *sink) const
+ * \sa link(const MediaEntity *source, unsigned int sourceIdx,
+ * const MediaEntity *sink, unsigned int sinkIdx)
+ * \sa link(const MediaPad *source, const MediaPad *sink)
*
 * \return The link that connects the two pads, or nullptr if no such link
* exists
@@ -381,8 +382,9 @@ MediaLink *MediaDevice::link(const std::string &sourceName, unsigned int sourceI
* entity \a source, to the pad at index \a sinkIdx of the sink entity \a
* sink, if any.
*
- * \sa MediaDevice::link(const std::string &sourceName, unsigned int sourceIdx, const std::string &sinkName, unsigned int sinkIdx) const
- * \sa MediaDevice::link(const MediaPad *source, const MediaPad *sink) const
+ * \sa link(const std::string &sourceName, unsigned int sourceIdx,
+ * const std::string &sinkName, unsigned int sinkIdx)
+ * \sa link(const MediaPad *source, const MediaPad *sink)
*
 * \return The link that connects the two pads, or nullptr if no such link
* exists
@@ -404,8 +406,10 @@ MediaLink *MediaDevice::link(const MediaEntity *source, unsigned int sourceIdx,
* \param[in] source The source pad
* \param[in] sink The sink pad
*
- * \sa MediaDevice::link(const std::string &sourceName, unsigned int sourceIdx, const std::string &sinkName, unsigned int sinkIdx) const
- * \sa MediaDevice::link(const MediaEntity *source, unsigned int sourceIdx, const MediaEntity *sink, unsigned int sinkIdx) const
+ * \sa link(const std::string &sourceName, unsigned int sourceIdx,
+ * const std::string &sinkName, unsigned int sinkIdx)
+ * \sa link(const MediaEntity *source, unsigned int sourceIdx,
+ * const MediaEntity *sink, unsigned int sinkIdx)
*
 * \return The link that connects the two pads, or nullptr if no such link
* exists
diff --git a/src/libcamera/meson.build b/src/libcamera/meson.build
index ce1f0f2f..63b47b17 100644
--- a/src/libcamera/meson.build
+++ b/src/libcamera/meson.build
@@ -189,4 +189,6 @@ pkg_mod.generate(libcamera,
description : 'Complex Camera Support Library',
subdirs : 'libcamera')
+meson.override_dependency('libcamera', libcamera_public)
+
subdir('proxy/worker')
diff --git a/src/libcamera/pipeline/ipu3/ipu3.cpp b/src/libcamera/pipeline/ipu3/ipu3.cpp
index 9df24603..93219a6c 100644
--- a/src/libcamera/pipeline/ipu3/ipu3.cpp
+++ b/src/libcamera/pipeline/ipu3/ipu3.cpp
@@ -243,6 +243,7 @@ CameraConfiguration::Status IPU3CameraConfiguration::validate()
*/
unsigned int rawCount = 0;
unsigned int yuvCount = 0;
+ Size rawRequirement;
Size maxYuvSize;
Size rawSize;
@@ -251,10 +252,11 @@ CameraConfiguration::Status IPU3CameraConfiguration::validate()
if (info.colourEncoding == PixelFormatInfo::ColourEncodingRAW) {
rawCount++;
- rawSize.expandTo(cfg.size);
+ rawSize = std::max(rawSize, cfg.size);
} else {
yuvCount++;
- maxYuvSize.expandTo(cfg.size);
+ maxYuvSize = std::max(maxYuvSize, cfg.size);
+ rawRequirement.expandTo(cfg.size);
}
}
@@ -283,17 +285,17 @@ CameraConfiguration::Status IPU3CameraConfiguration::validate()
* The output YUV streams will be limited in size to the maximum frame
* size requested for the RAW stream, if present.
*
- * If no raw stream is requested generate a size as large as the maximum
- * requested YUV size aligned to the ImgU constraints and bound by the
- * sensor's maximum resolution. See
+	 * If no raw stream is requested, generate a size large enough to
+	 * cover all requested YUV streams, aligned to the ImgU constraints
+	 * and bounded by the sensor's maximum resolution. See
* https://bugs.libcamera.org/show_bug.cgi?id=32
*/
if (rawSize.isNull())
- rawSize = maxYuvSize.expandedTo({ ImgUDevice::kIFMaxCropWidth,
- ImgUDevice::kIFMaxCropHeight })
- .grownBy({ ImgUDevice::kOutputMarginWidth,
- ImgUDevice::kOutputMarginHeight })
- .boundedTo(data_->cio2_.sensor()->resolution());
+ rawSize = rawRequirement.expandedTo({ ImgUDevice::kIFMaxCropWidth,
+ ImgUDevice::kIFMaxCropHeight })
+ .grownBy({ ImgUDevice::kOutputMarginWidth,
+ ImgUDevice::kOutputMarginHeight })
+ .boundedTo(data_->cio2_.sensor()->resolution());
cio2Configuration_ = data_->cio2_.generateConfiguration(rawSize);
if (!cio2Configuration_.pixelFormat.isValid())
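[Editorial note] The chained Size operations above read naturally once their semantics are known: expandedTo() takes the per-dimension maximum, grownBy() adds margins, and boundedTo() caps the result. A worked example with illustrative numbers (the real bounds come from the ImgUDevice constants and the sensor resolution):

#include <libcamera/geometry.h>

using libcamera::Size;

int main()
{
	Size yuvRequirement(1280, 720);

	Size raw = yuvRequirement
			   .expandedTo({ 1920, 1080 })	/* minimum IF crop, illustrative */
			   .grownBy({ 25, 16 })		/* output margins, illustrative */
			   .boundedTo({ 4224, 3136 });	/* sensor resolution, illustrative */

	return raw == Size(1945, 1096) ? 0 : 1;
}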
diff --git a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
index e895584d..dcd81650 100644
--- a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
+++ b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
@@ -253,9 +253,14 @@ public:
* thread. So, we do not need to have any mutex to protect access to any
* of the variables below.
*/
- enum class State { Stopped, Idle, Busy, IpaComplete };
+ enum class State { Stopped, Idle, Busy, IpaComplete, Error };
State state_;
+ bool isRunning()
+ {
+ return state_ != State::Stopped && state_ != State::Error;
+ }
+
struct BayerFrame {
FrameBuffer *buffer;
ControlList controls;
@@ -593,11 +598,11 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
fmts = data->isp_[Isp::Output0].dev()->formats();
pixelFormat = formats::NV12;
/*
- * Still image codecs usually expect the JPEG color space.
+ * Still image codecs usually expect the sYCC color space.
* Even RGB codecs will be fine as the RGB we get with the
- * JPEG color space is the same as sRGB.
+ * sYCC color space is the same as sRGB.
*/
- colorSpace = ColorSpace::Jpeg;
+ colorSpace = ColorSpace::Sycc;
/* Return the largest sensor resolution. */
size = sensorSize;
bufferCount = 1;
@@ -628,7 +633,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
case StreamRole::Viewfinder:
fmts = data->isp_[Isp::Output0].dev()->formats();
pixelFormat = formats::ARGB8888;
- colorSpace = ColorSpace::Jpeg;
+ colorSpace = ColorSpace::Sycc;
size = { 800, 600 };
bufferCount = 4;
outCount++;
@@ -835,7 +840,7 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
format.size = maxSize;
format.fourcc = dev->toV4L2PixelFormat(formats::YUV420);
/* No one asked for output, so the color space doesn't matter. */
- format.colorSpace = ColorSpace::Jpeg;
+ format.colorSpace = ColorSpace::Sycc;
ret = dev->setFormat(&format);
if (ret) {
LOG(RPI, Error)
@@ -1109,7 +1114,7 @@ int PipelineHandlerRPi::queueRequestDevice(Camera *camera, Request *request)
{
RPiCameraData *data = cameraData(camera);
- if (data->state_ == RPiCameraData::State::Stopped)
+ if (!data->isRunning())
return -EINVAL;
LOG(RPI, Debug) << "queueRequestDevice: New request.";
@@ -1708,7 +1713,7 @@ void RPiCameraData::enumerateVideoDevices(MediaLink *link)
void RPiCameraData::statsMetadataComplete(uint32_t bufferId, const ControlList &controls)
{
- if (state_ == State::Stopped)
+ if (!isRunning())
return;
FrameBuffer *buffer = isp_[Isp::Stats].getBuffers().at(bufferId);
@@ -1744,7 +1749,7 @@ void RPiCameraData::statsMetadataComplete(uint32_t bufferId, const ControlList &
void RPiCameraData::runIsp(uint32_t bufferId)
{
- if (state_ == State::Stopped)
+ if (!isRunning())
return;
FrameBuffer *buffer = unicam_[Unicam::Image].getBuffers().at(bufferId);
@@ -1759,7 +1764,7 @@ void RPiCameraData::runIsp(uint32_t bufferId)
void RPiCameraData::embeddedComplete(uint32_t bufferId)
{
- if (state_ == State::Stopped)
+ if (!isRunning())
return;
FrameBuffer *buffer = unicam_[Unicam::Embedded].getBuffers().at(bufferId);
@@ -1818,6 +1823,17 @@ void RPiCameraData::unicamTimeout()
LOG(RPI, Error) << "Unicam has timed out!";
LOG(RPI, Error) << "Please check that your camera sensor connector is attached securely.";
LOG(RPI, Error) << "Alternatively, try another cable and/or sensor.";
+
+ state_ = RPiCameraData::State::Error;
+ /*
+ * To allow the application to attempt a recovery from this timeout,
+ * stop all devices streaming, and return any outstanding requests as
+ * incomplete and cancelled.
+ */
+ for (auto const stream : streams_)
+ stream->dev()->streamOff();
+
+ clearIncompleteRequests();
}
void RPiCameraData::unicamBufferDequeue(FrameBuffer *buffer)
@@ -1825,7 +1841,7 @@ void RPiCameraData::unicamBufferDequeue(FrameBuffer *buffer)
RPi::Stream *stream = nullptr;
int index;
- if (state_ == State::Stopped)
+ if (!isRunning())
return;
for (RPi::Stream &s : unicam_) {
@@ -1864,7 +1880,7 @@ void RPiCameraData::unicamBufferDequeue(FrameBuffer *buffer)
void RPiCameraData::ispInputDequeue(FrameBuffer *buffer)
{
- if (state_ == State::Stopped)
+ if (!isRunning())
return;
LOG(RPI, Debug) << "Stream ISP Input buffer complete"
@@ -1881,7 +1897,7 @@ void RPiCameraData::ispOutputDequeue(FrameBuffer *buffer)
RPi::Stream *stream = nullptr;
int index;
- if (state_ == State::Stopped)
+ if (!isRunning())
return;
for (RPi::Stream &s : isp_) {
@@ -1991,6 +2007,7 @@ void RPiCameraData::handleState()
switch (state_) {
case State::Stopped:
case State::Busy:
+ case State::Error:
break;
case State::IpaComplete:
diff --git a/src/libcamera/pipeline/rkisp1/rkisp1.cpp b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
index 93287332..25fbf9f1 100644
--- a/src/libcamera/pipeline/rkisp1/rkisp1.cpp
+++ b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
@@ -18,6 +18,7 @@
#include <libcamera/base/utils.h>
#include <libcamera/camera.h>
+#include <libcamera/color_space.h>
#include <libcamera/control_ids.h>
#include <libcamera/formats.h>
#include <libcamera/framebuffer.h>
@@ -416,11 +417,13 @@ CameraConfiguration::Status RkISP1CameraConfiguration::validate()
{
const CameraSensor *sensor = data_->sensor_.get();
unsigned int pathCount = data_->selfPath_ ? 2 : 1;
- Status status = Valid;
+ Status status;
if (config_.empty())
return Invalid;
+ status = validateColorSpaces(ColorSpaceFlag::StreamsShareColorSpace);
+
if (transform != Transform::Identity) {
transform = Transform::Identity;
status = Adjusted;
@@ -547,21 +550,44 @@ CameraConfiguration *PipelineHandlerRkISP1::generateConfiguration(Camera *camera
if (roles.empty())
return config;
+ /*
+ * As the ISP can't output different color spaces for the main and self
+ * path, pick a sensible default color space based on the role of the
+ * first stream and use it for all streams.
+ */
+ std::optional<ColorSpace> colorSpace;
+
bool mainPathAvailable = true;
bool selfPathAvailable = data->selfPath_;
+
for (const StreamRole role : roles) {
bool useMainPath;
switch (role) {
- case StreamRole::StillCapture: {
+ case StreamRole::StillCapture:
useMainPath = mainPathAvailable;
+ /* JPEG encoders typically expect sYCC. */
+ if (!colorSpace)
+ colorSpace = ColorSpace::Sycc;
break;
- }
+
case StreamRole::Viewfinder:
- case StreamRole::VideoRecording: {
useMainPath = !selfPathAvailable;
+ /*
+ * sYCC is the YCbCr encoding of sRGB, which is commonly
+ * used by displays.
+ */
+ if (!colorSpace)
+ colorSpace = ColorSpace::Sycc;
break;
- }
+
+ case StreamRole::VideoRecording:
+ useMainPath = !selfPathAvailable;
+ /* Rec. 709 is a good default for HD video recording. */
+ if (!colorSpace)
+ colorSpace = ColorSpace::Rec709;
+ break;
+
default:
LOG(RkISP1, Warning)
<< "Requested stream role not supported: " << role;
@@ -580,6 +606,7 @@ CameraConfiguration *PipelineHandlerRkISP1::generateConfiguration(Camera *camera
selfPathAvailable = false;
}
+ cfg.colorSpace = colorSpace;
config->addConfiguration(cfg);
}
@@ -642,6 +669,7 @@ int PipelineHandlerRkISP1::configure(Camera *camera, CameraConfiguration *c)
if (ret < 0)
return ret;
+ format.colorSpace = config->at(0).colorSpace;
ret = isp_->setFormat(2, &format);
if (ret < 0)
return ret;
@@ -688,7 +716,6 @@ int PipelineHandlerRkISP1::configure(Camera *camera, CameraConfiguration *c)
/* \todo Turn this into a hard failure. */
LOG(RkISP1, Warning) << "Camera sensor information not available";
sensorInfo = {};
- ret = 0;
}
std::map<uint32_t, ControlInfoMap> entityControls;
diff --git a/src/libcamera/pipeline/uvcvideo/uvcvideo.cpp b/src/libcamera/pipeline/uvcvideo/uvcvideo.cpp
index fbe02cdc..a2819545 100644
--- a/src/libcamera/pipeline/uvcvideo/uvcvideo.cpp
+++ b/src/libcamera/pipeline/uvcvideo/uvcvideo.cpp
@@ -46,8 +46,16 @@ public:
ControlInfoMap::Map *ctrls);
void bufferReady(FrameBuffer *buffer);
+ const std::string &id() const { return id_; }
+
std::unique_ptr<V4L2VideoDevice> video_;
Stream stream_;
+ std::map<PixelFormat, std::vector<SizeRange>> formats_;
+
+private:
+ bool generateId();
+
+ std::string id_;
};
class UVCCameraConfiguration : public CameraConfiguration
@@ -81,8 +89,6 @@ public:
bool match(DeviceEnumerator *enumerator) override;
private:
- std::string generateId(const UVCCameraData *data);
-
int processControl(ControlList *controls, unsigned int id,
const ControlValue &value);
int processControls(UVCCameraData *data, Request *request);
@@ -159,6 +165,11 @@ CameraConfiguration::Status UVCCameraConfiguration::validate()
cfg.stride = format.planes[0].bpl;
cfg.frameSize = format.planes[0].size;
+ if (cfg.colorSpace != format.colorSpace) {
+ cfg.colorSpace = format.colorSpace;
+ status = Adjusted;
+ }
+
return status;
}
@@ -176,15 +187,7 @@ CameraConfiguration *PipelineHandlerUVC::generateConfiguration(Camera *camera,
if (roles.empty())
return config;
- V4L2VideoDevice::Formats v4l2Formats = data->video_->formats();
- std::map<PixelFormat, std::vector<SizeRange>> deviceFormats;
- for (const auto &format : v4l2Formats) {
- PixelFormat pixelFormat = format.first.toPixelFormat();
- if (pixelFormat.isValid())
- deviceFormats[pixelFormat] = format.second;
- }
-
- StreamFormats formats(deviceFormats);
+ StreamFormats formats(data->formats_);
StreamConfiguration cfg(formats);
cfg.pixelFormat = formats.pixelformats().front();
@@ -340,12 +343,8 @@ int PipelineHandlerUVC::processControls(UVCCameraData *data, Request *request)
{
ControlList controls(data->video_->controls());
- for (auto it : request->controls()) {
- unsigned int id = it.first;
- ControlValue &value = it.second;
-
+ for (const auto &[id, value] : request->controls())
processControl(&controls, id, value);
- }
for (const auto &ctrl : controls)
LOG(UVC, Debug)
@@ -383,69 +382,6 @@ int PipelineHandlerUVC::queueRequestDevice(Camera *camera, Request *request)
return 0;
}
-std::string PipelineHandlerUVC::generateId(const UVCCameraData *data)
-{
- const std::string path = data->video_->devicePath();
-
- /* Create a controller ID from first device described in firmware. */
- std::string controllerId;
- std::string searchPath = path;
- while (true) {
- std::string::size_type pos = searchPath.rfind('/');
- if (pos <= 1) {
- LOG(UVC, Error) << "Can not find controller ID";
- return {};
- }
-
- searchPath = searchPath.substr(0, pos);
-
- controllerId = sysfs::firmwareNodePath(searchPath);
- if (!controllerId.empty())
- break;
- }
-
- /*
- * Create a USB ID from the device path which has the known format:
- *
- * path = bus, "-", ports, ":", config, ".", interface ;
- * bus = number ;
- * ports = port, [ ".", ports ] ;
- * port = number ;
- * config = number ;
- * interface = number ;
- *
- * Example: 3-2.4:1.0
- *
- * The bus is not guaranteed to be stable and needs to be stripped from
- * the USB ID. The final USB ID is built up of the ports, config and
- * interface properties.
- *
- * Example 2.4:1.0.
- */
- std::string usbId = utils::basename(path.c_str());
- usbId = usbId.substr(usbId.find('-') + 1);
-
- /* Creata a device ID from the USB devices vendor and product ID. */
- std::string deviceId;
- for (const char *name : { "idVendor", "idProduct" }) {
- std::ifstream file(path + "/../" + name);
-
- if (!file.is_open())
- return {};
-
- std::string value;
- std::getline(file, value);
- file.close();
-
- if (!deviceId.empty())
- deviceId += ":";
-
- deviceId += value;
- }
-
- return controllerId + "-" + usbId + "-" + deviceId;
-}
-
bool PipelineHandlerUVC::match(DeviceEnumerator *enumerator)
{
MediaDevice *media;
@@ -461,12 +397,7 @@ bool PipelineHandlerUVC::match(DeviceEnumerator *enumerator)
return false;
/* Create and register the camera. */
- std::string id = generateId(data.get());
- if (id.empty()) {
- LOG(UVC, Error) << "Failed to generate camera ID";
- return false;
- }
-
+ std::string id = data->id();
std::set<Stream *> streams{ &data->stream_ };
std::shared_ptr<Camera> camera =
Camera::create(std::move(data), id, streams);
@@ -501,6 +432,39 @@ int UVCCameraData::init(MediaDevice *media)
video_->bufferReady.connect(this, &UVCCameraData::bufferReady);
+ /* Generate the camera ID. */
+ if (!generateId()) {
+ LOG(UVC, Error) << "Failed to generate camera ID";
+ return -EINVAL;
+ }
+
+ /*
+ * Populate the map of supported formats, and infer the camera sensor
+ * resolution from the largest size it advertises.
+ */
+ Size resolution;
+ for (const auto &format : video_->formats()) {
+ PixelFormat pixelFormat = format.first.toPixelFormat();
+ if (!pixelFormat.isValid())
+ continue;
+
+ formats_[pixelFormat] = format.second;
+
+ const std::vector<SizeRange> &sizeRanges = format.second;
+ for (const SizeRange &sizeRange : sizeRanges) {
+ if (sizeRange.max > resolution)
+ resolution = sizeRange.max;
+ }
+ }
+
+ if (formats_.empty()) {
+ LOG(UVC, Error)
+ << "Camera " << id_ << " (" << media->model()
+ << ") doesn't expose any supported format";
+ return -EINVAL;
+ }
+
+ /* Populate the camera properties. */
properties_.set(properties::Model, utils::toAscii(media->model()));
/*
@@ -531,19 +495,6 @@ int UVCCameraData::init(MediaDevice *media)
properties_.set(properties::Location, location);
- /*
- * Get the current format in order to initialize the sensor array
- * properties.
- */
- Size resolution;
- for (const auto &it : video_->formats()) {
- const std::vector<SizeRange> &sizeRanges = it.second;
- for (const SizeRange &sizeRange : sizeRanges) {
- if (sizeRange.max > resolution)
- resolution = sizeRange.max;
- }
- }
-
properties_.set(properties::PixelArraySize, resolution);
properties_.set(properties::PixelArrayActiveAreas, { Rectangle(resolution) });
@@ -562,6 +513,70 @@ int UVCCameraData::init(MediaDevice *media)
return 0;
}
+bool UVCCameraData::generateId()
+{
+ const std::string path = video_->devicePath();
+
+ /* Create a controller ID from first device described in firmware. */
+ std::string controllerId;
+ std::string searchPath = path;
+ while (true) {
+ std::string::size_type pos = searchPath.rfind('/');
+ if (pos <= 1) {
+			LOG(UVC, Error) << "Cannot find controller ID";
+ return false;
+ }
+
+ searchPath = searchPath.substr(0, pos);
+
+ controllerId = sysfs::firmwareNodePath(searchPath);
+ if (!controllerId.empty())
+ break;
+ }
+
+ /*
+ * Create a USB ID from the device path which has the known format:
+ *
+ * path = bus, "-", ports, ":", config, ".", interface ;
+ * bus = number ;
+ * ports = port, [ ".", ports ] ;
+ * port = number ;
+ * config = number ;
+ * interface = number ;
+ *
+ * Example: 3-2.4:1.0
+ *
+ * The bus is not guaranteed to be stable and needs to be stripped from
+ * the USB ID. The final USB ID is built up of the ports, config and
+ * interface properties.
+ *
+ * Example 2.4:1.0.
+ */
+ std::string usbId = utils::basename(path.c_str());
+ usbId = usbId.substr(usbId.find('-') + 1);
+
+	/* Create a device ID from the USB device's vendor and product IDs. */
+ std::string deviceId;
+ for (const char *name : { "idVendor", "idProduct" }) {
+ std::ifstream file(path + "/../" + name);
+
+ if (!file.is_open())
+ return false;
+
+ std::string value;
+ std::getline(file, value);
+ file.close();
+
+ if (!deviceId.empty())
+ deviceId += ":";
+
+ deviceId += value;
+ }
+
+ id_ = controllerId + "-" + usbId + "-" + deviceId;
+ return true;
+}
+
void UVCCameraData::addControl(uint32_t cid, const ControlInfo &v4l2Info,
ControlInfoMap::Map *ctrls)
{
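[Editorial note] The moved generateId() logic is mostly string surgery on the sysfs device path, as the comment above describes. A sketch of the USB ID step with a hypothetical path:

#include <iostream>
#include <string>

int main()
{
	/* Hypothetical sysfs path of the form bus-ports:config.interface. */
	std::string path =
		"/sys/devices/pci0000:00/0000:00:14.0/usb3/3-2/3-2.4/3-2.4:1.0";

	/* basename: "3-2.4:1.0" */
	std::string usbId = path.substr(path.rfind('/') + 1);

	/* Strip the unstable bus number: "2.4:1.0" */
	usbId = usbId.substr(usbId.find('-') + 1);

	std::cout << usbId << std::endl;
	return 0;
}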
diff --git a/src/libcamera/pipeline/vimc/vimc.cpp b/src/libcamera/pipeline/vimc/vimc.cpp
index 153cf849..d2f2e460 100644
--- a/src/libcamera/pipeline/vimc/vimc.cpp
+++ b/src/libcamera/pipeline/vimc/vimc.cpp
@@ -378,7 +378,7 @@ int PipelineHandlerVimc::processControls(VimcCameraData *data, Request *request)
{
ControlList controls(data->sensor_->controls());
- for (auto it : request->controls()) {
+ for (const auto &it : request->controls()) {
unsigned int id = it.first;
unsigned int offset;
uint32_t cid;
diff --git a/src/libcamera/pipeline_handler.cpp b/src/libcamera/pipeline_handler.cpp
index 67540533..e5cb751c 100644
--- a/src/libcamera/pipeline_handler.cpp
+++ b/src/libcamera/pipeline_handler.cpp
@@ -68,7 +68,7 @@ LOG_DEFINE_CATEGORY(Pipeline)
* respective factories.
*/
PipelineHandler::PipelineHandler(CameraManager *manager)
- : manager_(manager), lockOwner_(false)
+ : manager_(manager), useCount_(0)
{
}
@@ -143,58 +143,75 @@ MediaDevice *PipelineHandler::acquireMediaDevice(DeviceEnumerator *enumerator,
}
/**
- * \brief Lock all media devices acquired by the pipeline
+ * \brief Acquire exclusive access to the pipeline handler for the process
*
- * This function shall not be called from pipeline handler implementation, as
- * the Camera class handles locking directly.
+ * This function locks all the media devices used by the pipeline to ensure
+ * that no other process can access them concurrently.
+ *
+ * Access to a pipeline handler may be acquired recursively from within the
+ * same process. Every successful acquire() call shall be matched with a
+ * release() call. This allows concurrent access to the same pipeline handler
+ * from different cameras within the same process.
+ *
+ * Pipeline handlers shall not call this function directly as the Camera class
+ * handles access internally.
*
* \context This function is \threadsafe.
*
- * \return True if the devices could be locked, false otherwise
- * \sa unlock()
- * \sa MediaDevice::lock()
+ * \return True if the pipeline handler was acquired, false if another process
+ * has already acquired it
+ * \sa release()
*/
-bool PipelineHandler::lock()
+bool PipelineHandler::acquire()
{
MutexLocker locker(lock_);
- /* Do not allow nested locking in the same libcamera instance. */
- if (lockOwner_)
- return false;
+ if (useCount_) {
+ ++useCount_;
+ return true;
+ }
for (std::shared_ptr<MediaDevice> &media : mediaDevices_) {
if (!media->lock()) {
- unlock();
+ unlockMediaDevices();
return false;
}
}
- lockOwner_ = true;
-
+ ++useCount_;
return true;
}
/**
- * \brief Unlock all media devices acquired by the pipeline
+ * \brief Release exclusive access to the pipeline handler
+ *
+ * This function releases access to the pipeline handler previously acquired by
+ * a call to acquire(). Every release() call shall match a previous successful
+ * acquire() call. Calling this function on a pipeline handler that hasn't been
+ * acquired results in undefined behaviour.
*
- * This function shall not be called from pipeline handler implementation, as
- * the Camera class handles locking directly.
+ * Pipeline handlers shall not call this function directly as the Camera class
+ * handles access internally.
*
* \context This function is \threadsafe.
*
- * \sa lock()
+ * \sa acquire()
*/
-void PipelineHandler::unlock()
+void PipelineHandler::release()
{
MutexLocker locker(lock_);
- if (!lockOwner_)
- return;
+ ASSERT(useCount_);
+ unlockMediaDevices();
+
+ --useCount_;
+}
+
+void PipelineHandler::unlockMediaDevices()
+{
for (std::shared_ptr<MediaDevice> &media : mediaDevices_)
media->unlock();
-
- lockOwner_ = false;
}
/**
@@ -599,7 +616,7 @@ void PipelineHandler::disconnect()
*/
std::vector<std::weak_ptr<Camera>> cameras{ std::move(cameras_) };
- for (std::weak_ptr<Camera> ptr : cameras) {
+ for (const std::weak_ptr<Camera> &ptr : cameras) {
std::shared_ptr<Camera> camera = ptr.lock();
if (!camera)
continue;
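[Editorial note] A condensed sketch of the use-count pattern documented above: the first acquire() takes the underlying locks, later in-process calls only bump a counter, and every acquire() is paired with a release(). RecursiveAcquire is a simplified stand-in (the real PipelineHandler also manages per-media-device locks), not the libcamera class itself:

#include <mutex>

class RecursiveAcquire
{
public:
	bool acquire()
	{
		std::lock_guard<std::mutex> locker(lock_);

		/* Later callers in the same process only bump the count. */
		if (useCount_) {
			++useCount_;
			return true;
		}

		if (!lockResources())
			return false;

		++useCount_;
		return true;
	}

	void release()
	{
		std::lock_guard<std::mutex> locker(lock_);

		if (--useCount_ == 0)
			unlockResources();
	}

private:
	/* Stand-ins for the inter-process media device locks. */
	bool lockResources() { return true; }
	void unlockResources() {}

	std::mutex lock_;
	unsigned int useCount_ = 0;
};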
diff --git a/src/libcamera/request.cpp b/src/libcamera/request.cpp
index d2af1d22..949c556f 100644
--- a/src/libcamera/request.cpp
+++ b/src/libcamera/request.cpp
@@ -158,9 +158,12 @@ void Request::Private::cancel()
}
/**
- * \copydoc Request::reuse()
+ * \brief Reset the request internal data to default values
+ *
+ * After calling this function, all request internal data will have default
+ * values as if the Request::Private instance had just been constructed.
*/
-void Request::Private::reuse()
+void Request::Private::reset()
{
sequence_ = 0;
cancelled_ = false;
@@ -380,7 +383,7 @@ void Request::reuse(ReuseFlag flags)
{
LIBCAMERA_TRACEPOINT(request_reuse, this);
- _d()->reuse();
+ _d()->reset();
if (flags & ReuseBuffers) {
for (auto pair : bufferMap_) {
diff --git a/src/libcamera/v4l2_device.cpp b/src/libcamera/v4l2_device.cpp
index 3fc8438f..c60f7c91 100644
--- a/src/libcamera/v4l2_device.cpp
+++ b/src/libcamera/v4l2_device.cpp
@@ -24,6 +24,7 @@
#include <libcamera/base/log.h>
#include <libcamera/base/utils.h>
+#include "libcamera/internal/formats.h"
#include "libcamera/internal/sysfs.h"
/**
@@ -88,7 +89,8 @@ int V4L2Device::open(unsigned int flags)
UniqueFD fd(syscall(SYS_openat, AT_FDCWD, deviceNode_.c_str(), flags));
if (!fd.isValid()) {
int ret = -errno;
- LOG(V4L2, Error) << "Failed to open V4L2 device: "
+ LOG(V4L2, Error) << "Failed to open V4L2 device '"
+ << deviceNode_ << "': "
<< strerror(-ret);
return ret;
}
@@ -242,7 +244,8 @@ ControlList V4L2Device::getControls(const std::vector<uint32_t> &ids)
}
/* A specific control failed. */
- LOG(V4L2, Error) << "Unable to read control " << errorIdx
+ const unsigned int id = v4l2Ctrls[errorIdx].id;
+ LOG(V4L2, Error) << "Unable to read control " << utils::hex(id)
<< ": " << strerror(-ret);
v4l2Ctrls.resize(errorIdx);
@@ -352,7 +355,8 @@ int V4L2Device::setControls(ControlList *ctrls)
}
/* A specific control failed. */
- LOG(V4L2, Error) << "Unable to set control " << errorIdx
+ const unsigned int id = v4l2Ctrls[errorIdx].id;
+ LOG(V4L2, Error) << "Unable to set control " << utils::hex(id)
<< ": " << strerror(-ret);
v4l2Ctrls.resize(errorIdx);
@@ -745,8 +749,12 @@ void V4L2Device::eventAvailable()
static const std::map<uint32_t, ColorSpace> v4l2ToColorSpace = {
{ V4L2_COLORSPACE_RAW, ColorSpace::Raw },
- { V4L2_COLORSPACE_JPEG, ColorSpace::Jpeg },
- { V4L2_COLORSPACE_SRGB, ColorSpace::Srgb },
+ { V4L2_COLORSPACE_SRGB, {
+ ColorSpace::Primaries::Rec709,
+ ColorSpace::TransferFunction::Srgb,
+ ColorSpace::YcbcrEncoding::Rec601,
+ ColorSpace::Range::Limited } },
+ { V4L2_COLORSPACE_JPEG, ColorSpace::Sycc },
{ V4L2_COLORSPACE_SMPTE170M, ColorSpace::Smpte170m },
{ V4L2_COLORSPACE_REC709, ColorSpace::Rec709 },
{ V4L2_COLORSPACE_BT2020, ColorSpace::Rec2020 },
@@ -771,8 +779,7 @@ static const std::map<uint32_t, ColorSpace::Range> v4l2ToRange = {
static const std::vector<std::pair<ColorSpace, v4l2_colorspace>> colorSpaceToV4l2 = {
{ ColorSpace::Raw, V4L2_COLORSPACE_RAW },
- { ColorSpace::Jpeg, V4L2_COLORSPACE_JPEG },
- { ColorSpace::Srgb, V4L2_COLORSPACE_SRGB },
+ { ColorSpace::Sycc, V4L2_COLORSPACE_JPEG },
{ ColorSpace::Smpte170m, V4L2_COLORSPACE_SMPTE170M },
{ ColorSpace::Rec709, V4L2_COLORSPACE_REC709 },
{ ColorSpace::Rec2020, V4L2_COLORSPACE_BT2020 },
@@ -792,6 +799,8 @@ static const std::map<ColorSpace::TransferFunction, v4l2_xfer_func> transferFunc
};
static const std::map<ColorSpace::YcbcrEncoding, v4l2_ycbcr_encoding> ycbcrEncodingToV4l2 = {
+ /* V4L2 has no "none" encoding. */
+ { ColorSpace::YcbcrEncoding::None, V4L2_YCBCR_ENC_DEFAULT },
{ ColorSpace::YcbcrEncoding::Rec601, V4L2_YCBCR_ENC_601 },
{ ColorSpace::YcbcrEncoding::Rec709, V4L2_YCBCR_ENC_709 },
{ ColorSpace::YcbcrEncoding::Rec2020, V4L2_YCBCR_ENC_BT2020 },
@@ -805,6 +814,7 @@ static const std::map<ColorSpace::Range, v4l2_quantization> rangeToV4l2 = {
/**
* \brief Convert the color space fields in a V4L2 format to a ColorSpace
* \param[in] v4l2Format A V4L2 format containing color space information
+ * \param[in] colourEncoding Type of colour encoding
*
* The colorspace, ycbcr_enc, xfer_func and quantization fields within a
* V4L2 format structure are converted to a corresponding ColorSpace.
@@ -816,7 +826,8 @@ static const std::map<ColorSpace::Range, v4l2_quantization> rangeToV4l2 = {
* \retval std::nullopt One or more V4L2 color space fields were not recognised
*/
template<typename T>
-std::optional<ColorSpace> V4L2Device::toColorSpace(const T &v4l2Format)
+std::optional<ColorSpace> V4L2Device::toColorSpace(const T &v4l2Format,
+ PixelFormatInfo::ColourEncoding colourEncoding)
{
auto itColor = v4l2ToColorSpace.find(v4l2Format.colorspace);
if (itColor == v4l2ToColorSpace.end())
@@ -839,6 +850,14 @@ std::optional<ColorSpace> V4L2Device::toColorSpace(const T &v4l2Format)
return std::nullopt;
colorSpace.ycbcrEncoding = itYcbcrEncoding->second;
+
+ /*
+		 * V4L2 has no "none" encoding; override the value returned
+		 * by the kernel for non-YUV formats, as a Y'CbCr encoding
+		 * isn't applicable in that case.
+ */
+ if (colourEncoding != PixelFormatInfo::ColourEncodingYUV)
+ colorSpace.ycbcrEncoding = ColorSpace::YcbcrEncoding::None;
}
if (v4l2Format.quantization != V4L2_QUANTIZATION_DEFAULT) {
@@ -847,14 +866,24 @@ std::optional<ColorSpace> V4L2Device::toColorSpace(const T &v4l2Format)
return std::nullopt;
colorSpace.range = itRange->second;
+
+ /*
+ * "Limited" quantization range is only meant for YUV formats.
+ * Override the range to "Full" for all other formats.
+ */
+ if (colourEncoding != PixelFormatInfo::ColourEncodingYUV)
+ colorSpace.range = ColorSpace::Range::Full;
}
return colorSpace;
}
-template std::optional<ColorSpace> V4L2Device::toColorSpace(const struct v4l2_pix_format &);
-template std::optional<ColorSpace> V4L2Device::toColorSpace(const struct v4l2_pix_format_mplane &);
-template std::optional<ColorSpace> V4L2Device::toColorSpace(const struct v4l2_mbus_framefmt &);
+template std::optional<ColorSpace> V4L2Device::toColorSpace(const struct v4l2_pix_format &,
+ PixelFormatInfo::ColourEncoding);
+template std::optional<ColorSpace> V4L2Device::toColorSpace(const struct v4l2_pix_format_mplane &,
+ PixelFormatInfo::ColourEncoding);
+template std::optional<ColorSpace> V4L2Device::toColorSpace(const struct v4l2_mbus_framefmt &,
+ PixelFormatInfo::ColourEncoding);
/**
* \brief Fill in the color space fields of a V4L2 format from a ColorSpace
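[Editorial note] The two overrides added to toColorSpace() implement one rule: a Y'CbCr encoding and a limited quantization range are only meaningful for YUV formats. Expressed on its own, assuming access to the internal formats.h header; applyEncodingRules() is an illustrative helper, not a libcamera function:

#include <libcamera/color_space.h>

#include "libcamera/internal/formats.h"

using namespace libcamera;

ColorSpace applyEncodingRules(ColorSpace colorSpace,
			      PixelFormatInfo::ColourEncoding encoding)
{
	if (encoding != PixelFormatInfo::ColourEncodingYUV) {
		colorSpace.ycbcrEncoding = ColorSpace::YcbcrEncoding::None;
		colorSpace.range = ColorSpace::Range::Full;
	}

	return colorSpace;
}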
diff --git a/src/libcamera/v4l2_subdevice.cpp b/src/libcamera/v4l2_subdevice.cpp
index e5d20f2c..9ef95963 100644
--- a/src/libcamera/v4l2_subdevice.cpp
+++ b/src/libcamera/v4l2_subdevice.cpp
@@ -23,6 +23,7 @@
#include <libcamera/base/log.h>
#include <libcamera/base/utils.h>
+#include "libcamera/internal/formats.h"
#include "libcamera/internal/media_device.h"
#include "libcamera/internal/media_object.h"
@@ -42,10 +43,12 @@ namespace {
* \brief Information about media bus formats
* \param bitsPerPixel Bits per pixel
* \param name Name of MBUS format
+ * \param colourEncoding Type of colour encoding
*/
struct V4L2SubdeviceFormatInfo {
unsigned int bitsPerPixel;
const char *name;
+ PixelFormatInfo::ColourEncoding colourEncoding;
};
/*
@@ -54,81 +57,82 @@ struct V4L2SubdeviceFormatInfo {
* bus codes
*/
const std::map<uint32_t, V4L2SubdeviceFormatInfo> formatInfoMap = {
- { MEDIA_BUS_FMT_RGB444_2X8_PADHI_BE, { 16, "RGB444_2X8_PADHI_BE" } },
- { MEDIA_BUS_FMT_RGB444_2X8_PADHI_LE, { 16, "RGB444_2X8_PADHI_LE" } },
- { MEDIA_BUS_FMT_RGB555_2X8_PADHI_BE, { 16, "RGB555_2X8_PADHI_BE" } },
- { MEDIA_BUS_FMT_RGB555_2X8_PADHI_LE, { 16, "RGB555_2X8_PADHI_LE" } },
- { MEDIA_BUS_FMT_RGB565_1X16, { 16, "RGB565_1X16" } },
- { MEDIA_BUS_FMT_BGR565_2X8_BE, { 16, "BGR565_2X8_BE" } },
- { MEDIA_BUS_FMT_BGR565_2X8_LE, { 16, "BGR565_2X8_LE" } },
- { MEDIA_BUS_FMT_RGB565_2X8_BE, { 16, "RGB565_2X8_BE" } },
- { MEDIA_BUS_FMT_RGB565_2X8_LE, { 16, "RGB565_2X8_LE" } },
- { MEDIA_BUS_FMT_RGB666_1X18, { 18, "RGB666_1X18" } },
- { MEDIA_BUS_FMT_RGB888_1X24, { 24, "RGB888_1X24" } },
- { MEDIA_BUS_FMT_RGB888_2X12_BE, { 24, "RGB888_2X12_BE" } },
- { MEDIA_BUS_FMT_RGB888_2X12_LE, { 24, "RGB888_2X12_LE" } },
- { MEDIA_BUS_FMT_ARGB8888_1X32, { 32, "ARGB8888_1X32" } },
- { MEDIA_BUS_FMT_Y8_1X8, { 8, "Y8_1X8" } },
- { MEDIA_BUS_FMT_UV8_1X8, { 8, "UV8_1X8" } },
- { MEDIA_BUS_FMT_UYVY8_1_5X8, { 12, "UYVY8_1_5X8" } },
- { MEDIA_BUS_FMT_VYUY8_1_5X8, { 12, "VYUY8_1_5X8" } },
- { MEDIA_BUS_FMT_YUYV8_1_5X8, { 12, "YUYV8_1_5X8" } },
- { MEDIA_BUS_FMT_YVYU8_1_5X8, { 12, "YVYU8_1_5X8" } },
- { MEDIA_BUS_FMT_UYVY8_2X8, { 16, "UYVY8_2X8" } },
- { MEDIA_BUS_FMT_VYUY8_2X8, { 16, "VYUY8_2X8" } },
- { MEDIA_BUS_FMT_YUYV8_2X8, { 16, "YUYV8_2X8" } },
- { MEDIA_BUS_FMT_YVYU8_2X8, { 16, "YVYU8_2X8" } },
- { MEDIA_BUS_FMT_Y10_1X10, { 10, "Y10_1X10" } },
- { MEDIA_BUS_FMT_UYVY10_2X10, { 20, "UYVY10_2X10" } },
- { MEDIA_BUS_FMT_VYUY10_2X10, { 20, "VYUY10_2X10" } },
- { MEDIA_BUS_FMT_YUYV10_2X10, { 20, "YUYV10_2X10" } },
- { MEDIA_BUS_FMT_YVYU10_2X10, { 20, "YVYU10_2X10" } },
- { MEDIA_BUS_FMT_Y12_1X12, { 12, "Y12_1X12" } },
- { MEDIA_BUS_FMT_UYVY8_1X16, { 16, "UYVY8_1X16" } },
- { MEDIA_BUS_FMT_VYUY8_1X16, { 16, "VYUY8_1X16" } },
- { MEDIA_BUS_FMT_YUYV8_1X16, { 16, "YUYV8_1X16" } },
- { MEDIA_BUS_FMT_YVYU8_1X16, { 16, "YVYU8_1X16" } },
- { MEDIA_BUS_FMT_YDYUYDYV8_1X16, { 16, "YDYUYDYV8_1X16" } },
- { MEDIA_BUS_FMT_UYVY10_1X20, { 20, "UYVY10_1X20" } },
- { MEDIA_BUS_FMT_VYUY10_1X20, { 20, "VYUY10_1X20" } },
- { MEDIA_BUS_FMT_YUYV10_1X20, { 20, "YUYV10_1X20" } },
- { MEDIA_BUS_FMT_YVYU10_1X20, { 20, "YVYU10_1X20" } },
- { MEDIA_BUS_FMT_YUV8_1X24, { 24, "YUV8_1X24" } },
- { MEDIA_BUS_FMT_YUV10_1X30, { 30, "YUV10_1X30" } },
- { MEDIA_BUS_FMT_AYUV8_1X32, { 32, "AYUV8_1X32" } },
- { MEDIA_BUS_FMT_UYVY12_2X12, { 24, "UYVY12_2X12" } },
- { MEDIA_BUS_FMT_VYUY12_2X12, { 24, "VYUY12_2X12" } },
- { MEDIA_BUS_FMT_YUYV12_2X12, { 24, "YUYV12_2X12" } },
- { MEDIA_BUS_FMT_YVYU12_2X12, { 24, "YVYU12_2X12" } },
- { MEDIA_BUS_FMT_UYVY12_1X24, { 24, "UYVY12_1X24" } },
- { MEDIA_BUS_FMT_VYUY12_1X24, { 24, "VYUY12_1X24" } },
- { MEDIA_BUS_FMT_YUYV12_1X24, { 24, "YUYV12_1X24" } },
- { MEDIA_BUS_FMT_YVYU12_1X24, { 24, "YVYU12_1X24" } },
- { MEDIA_BUS_FMT_SBGGR8_1X8, { 8, "SBGGR8_1X8" } },
- { MEDIA_BUS_FMT_SGBRG8_1X8, { 8, "SGBRG8_1X8" } },
- { MEDIA_BUS_FMT_SGRBG8_1X8, { 8, "SGRBG8_1X8" } },
- { MEDIA_BUS_FMT_SRGGB8_1X8, { 8, "SRGGB8_1X8" } },
- { MEDIA_BUS_FMT_SBGGR10_ALAW8_1X8, { 8, "SBGGR10_ALAW8_1X8" } },
- { MEDIA_BUS_FMT_SGBRG10_ALAW8_1X8, { 8, "SGBRG10_ALAW8_1X8" } },
- { MEDIA_BUS_FMT_SGRBG10_ALAW8_1X8, { 8, "SGRBG10_ALAW8_1X8" } },
- { MEDIA_BUS_FMT_SRGGB10_ALAW8_1X8, { 8, "SRGGB10_ALAW8_1X8" } },
- { MEDIA_BUS_FMT_SBGGR10_DPCM8_1X8, { 8, "SBGGR10_DPCM8_1X8" } },
- { MEDIA_BUS_FMT_SGBRG10_DPCM8_1X8, { 8, "SGBRG10_DPCM8_1X8" } },
- { MEDIA_BUS_FMT_SGRBG10_DPCM8_1X8, { 8, "SGRBG10_DPCM8_1X8" } },
- { MEDIA_BUS_FMT_SRGGB10_DPCM8_1X8, { 8, "SRGGB10_DPCM8_1X8" } },
- { MEDIA_BUS_FMT_SBGGR10_2X8_PADHI_BE, { 16, "SBGGR10_2X8_PADHI_BE" } },
- { MEDIA_BUS_FMT_SBGGR10_2X8_PADHI_LE, { 16, "SBGGR10_2X8_PADHI_LE" } },
- { MEDIA_BUS_FMT_SBGGR10_2X8_PADLO_BE, { 16, "SBGGR10_2X8_PADLO_BE" } },
- { MEDIA_BUS_FMT_SBGGR10_2X8_PADLO_LE, { 16, "SBGGR10_2X8_PADLO_LE" } },
- { MEDIA_BUS_FMT_SBGGR10_1X10, { 10, "SBGGR10_1X10" } },
- { MEDIA_BUS_FMT_SGBRG10_1X10, { 10, "SGBRG10_1X10" } },
- { MEDIA_BUS_FMT_SGRBG10_1X10, { 10, "SGRBG10_1X10" } },
- { MEDIA_BUS_FMT_SRGGB10_1X10, { 10, "SRGGB10_1X10" } },
- { MEDIA_BUS_FMT_SBGGR12_1X12, { 12, "SBGGR12_1X12" } },
- { MEDIA_BUS_FMT_SGBRG12_1X12, { 12, "SGBRG12_1X12" } },
- { MEDIA_BUS_FMT_SGRBG12_1X12, { 12, "SGRBG12_1X12" } },
- { MEDIA_BUS_FMT_SRGGB12_1X12, { 12, "SRGGB12_1X12" } },
- { MEDIA_BUS_FMT_AHSV8888_1X32, { 32, "AHSV8888_1X32" } },
+ { MEDIA_BUS_FMT_RGB444_2X8_PADHI_BE, { 16, "RGB444_2X8_PADHI_BE", PixelFormatInfo::ColourEncodingRGB } },
+ { MEDIA_BUS_FMT_RGB444_2X8_PADHI_LE, { 16, "RGB444_2X8_PADHI_LE", PixelFormatInfo::ColourEncodingRGB } },
+ { MEDIA_BUS_FMT_RGB555_2X8_PADHI_BE, { 16, "RGB555_2X8_PADHI_BE", PixelFormatInfo::ColourEncodingRGB } },
+ { MEDIA_BUS_FMT_RGB555_2X8_PADHI_LE, { 16, "RGB555_2X8_PADHI_LE", PixelFormatInfo::ColourEncodingRGB } },
+ { MEDIA_BUS_FMT_RGB565_1X16, { 16, "RGB565_1X16", PixelFormatInfo::ColourEncodingRGB } },
+ { MEDIA_BUS_FMT_BGR565_2X8_BE, { 16, "BGR565_2X8_BE", PixelFormatInfo::ColourEncodingRGB } },
+ { MEDIA_BUS_FMT_BGR565_2X8_LE, { 16, "BGR565_2X8_LE", PixelFormatInfo::ColourEncodingRGB } },
+ { MEDIA_BUS_FMT_RGB565_2X8_BE, { 16, "RGB565_2X8_BE", PixelFormatInfo::ColourEncodingRGB } },
+ { MEDIA_BUS_FMT_RGB565_2X8_LE, { 16, "RGB565_2X8_LE", PixelFormatInfo::ColourEncodingRGB } },
+ { MEDIA_BUS_FMT_RGB666_1X18, { 18, "RGB666_1X18", PixelFormatInfo::ColourEncodingRGB } },
+ { MEDIA_BUS_FMT_RGB888_1X24, { 24, "RGB888_1X24", PixelFormatInfo::ColourEncodingRGB } },
+ { MEDIA_BUS_FMT_RGB888_2X12_BE, { 24, "RGB888_2X12_BE", PixelFormatInfo::ColourEncodingRGB } },
+ { MEDIA_BUS_FMT_RGB888_2X12_LE, { 24, "RGB888_2X12_LE", PixelFormatInfo::ColourEncodingRGB } },
+ { MEDIA_BUS_FMT_ARGB8888_1X32, { 32, "ARGB8888_1X32", PixelFormatInfo::ColourEncodingRGB } },
+ { MEDIA_BUS_FMT_Y8_1X8, { 8, "Y8_1X8", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_UV8_1X8, { 8, "UV8_1X8", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_UYVY8_1_5X8, { 12, "UYVY8_1_5X8", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_VYUY8_1_5X8, { 12, "VYUY8_1_5X8", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_YUYV8_1_5X8, { 12, "YUYV8_1_5X8", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_YVYU8_1_5X8, { 12, "YVYU8_1_5X8", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_UYVY8_2X8, { 16, "UYVY8_2X8", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_VYUY8_2X8, { 16, "VYUY8_2X8", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_YUYV8_2X8, { 16, "YUYV8_2X8", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_YVYU8_2X8, { 16, "YVYU8_2X8", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_Y10_1X10, { 10, "Y10_1X10", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_UYVY10_2X10, { 20, "UYVY10_2X10", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_VYUY10_2X10, { 20, "VYUY10_2X10", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_YUYV10_2X10, { 20, "YUYV10_2X10", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_YVYU10_2X10, { 20, "YVYU10_2X10", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_Y12_1X12, { 12, "Y12_1X12", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_UYVY8_1X16, { 16, "UYVY8_1X16", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_VYUY8_1X16, { 16, "VYUY8_1X16", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_YUYV8_1X16, { 16, "YUYV8_1X16", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_YVYU8_1X16, { 16, "YVYU8_1X16", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_YDYUYDYV8_1X16, { 16, "YDYUYDYV8_1X16", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_UYVY10_1X20, { 20, "UYVY10_1X20", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_VYUY10_1X20, { 20, "VYUY10_1X20", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_YUYV10_1X20, { 20, "YUYV10_1X20", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_YVYU10_1X20, { 20, "YVYU10_1X20", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_YUV8_1X24, { 24, "YUV8_1X24", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_YUV10_1X30, { 30, "YUV10_1X30", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_AYUV8_1X32, { 32, "AYUV8_1X32", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_UYVY12_2X12, { 24, "UYVY12_2X12", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_VYUY12_2X12, { 24, "VYUY12_2X12", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_YUYV12_2X12, { 24, "YUYV12_2X12", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_YVYU12_2X12, { 24, "YVYU12_2X12", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_UYVY12_1X24, { 24, "UYVY12_1X24", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_VYUY12_1X24, { 24, "VYUY12_1X24", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_YUYV12_1X24, { 24, "YUYV12_1X24", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_YVYU12_1X24, { 24, "YVYU12_1X24", PixelFormatInfo::ColourEncodingYUV } },
+ { MEDIA_BUS_FMT_SBGGR8_1X8, { 8, "SBGGR8_1X8", PixelFormatInfo::ColourEncodingRAW } },
+ { MEDIA_BUS_FMT_SGBRG8_1X8, { 8, "SGBRG8_1X8", PixelFormatInfo::ColourEncodingRAW } },
+ { MEDIA_BUS_FMT_SGRBG8_1X8, { 8, "SGRBG8_1X8", PixelFormatInfo::ColourEncodingRAW } },
+ { MEDIA_BUS_FMT_SRGGB8_1X8, { 8, "SRGGB8_1X8", PixelFormatInfo::ColourEncodingRAW } },
+ { MEDIA_BUS_FMT_SBGGR10_ALAW8_1X8, { 8, "SBGGR10_ALAW8_1X8", PixelFormatInfo::ColourEncodingRAW } },
+ { MEDIA_BUS_FMT_SGBRG10_ALAW8_1X8, { 8, "SGBRG10_ALAW8_1X8", PixelFormatInfo::ColourEncodingRAW } },
+ { MEDIA_BUS_FMT_SGRBG10_ALAW8_1X8, { 8, "SGRBG10_ALAW8_1X8", PixelFormatInfo::ColourEncodingRAW } },
+ { MEDIA_BUS_FMT_SRGGB10_ALAW8_1X8, { 8, "SRGGB10_ALAW8_1X8", PixelFormatInfo::ColourEncodingRAW } },
+ { MEDIA_BUS_FMT_SBGGR10_DPCM8_1X8, { 8, "SBGGR10_DPCM8_1X8", PixelFormatInfo::ColourEncodingRAW } },
+ { MEDIA_BUS_FMT_SGBRG10_DPCM8_1X8, { 8, "SGBRG10_DPCM8_1X8", PixelFormatInfo::ColourEncodingRAW } },
+ { MEDIA_BUS_FMT_SGRBG10_DPCM8_1X8, { 8, "SGRBG10_DPCM8_1X8", PixelFormatInfo::ColourEncodingRAW } },
+ { MEDIA_BUS_FMT_SRGGB10_DPCM8_1X8, { 8, "SRGGB10_DPCM8_1X8", PixelFormatInfo::ColourEncodingRAW } },
+ { MEDIA_BUS_FMT_SBGGR10_2X8_PADHI_BE, { 16, "SBGGR10_2X8_PADHI_BE", PixelFormatInfo::ColourEncodingRAW } },
+ { MEDIA_BUS_FMT_SBGGR10_2X8_PADHI_LE, { 16, "SBGGR10_2X8_PADHI_LE", PixelFormatInfo::ColourEncodingRAW } },
+ { MEDIA_BUS_FMT_SBGGR10_2X8_PADLO_BE, { 16, "SBGGR10_2X8_PADLO_BE", PixelFormatInfo::ColourEncodingRAW } },
+ { MEDIA_BUS_FMT_SBGGR10_2X8_PADLO_LE, { 16, "SBGGR10_2X8_PADLO_LE", PixelFormatInfo::ColourEncodingRAW } },
+ { MEDIA_BUS_FMT_SBGGR10_1X10, { 10, "SBGGR10_1X10", PixelFormatInfo::ColourEncodingRAW } },
+ { MEDIA_BUS_FMT_SGBRG10_1X10, { 10, "SGBRG10_1X10", PixelFormatInfo::ColourEncodingRAW } },
+ { MEDIA_BUS_FMT_SGRBG10_1X10, { 10, "SGRBG10_1X10", PixelFormatInfo::ColourEncodingRAW } },
+ { MEDIA_BUS_FMT_SRGGB10_1X10, { 10, "SRGGB10_1X10", PixelFormatInfo::ColourEncodingRAW } },
+ { MEDIA_BUS_FMT_SBGGR12_1X12, { 12, "SBGGR12_1X12", PixelFormatInfo::ColourEncodingRAW } },
+ { MEDIA_BUS_FMT_SGBRG12_1X12, { 12, "SGBRG12_1X12", PixelFormatInfo::ColourEncodingRAW } },
+ { MEDIA_BUS_FMT_SGRBG12_1X12, { 12, "SGRBG12_1X12", PixelFormatInfo::ColourEncodingRAW } },
+ { MEDIA_BUS_FMT_SRGGB12_1X12, { 12, "SRGGB12_1X12", PixelFormatInfo::ColourEncodingRAW } },
+ /* \todo Clarify colour encoding for HSV formats */
+ { MEDIA_BUS_FMT_AHSV8888_1X32, { 32, "AHSV8888_1X32", PixelFormatInfo::ColourEncodingRGB } },
};
} /* namespace */
@@ -473,6 +477,36 @@ V4L2Subdevice::Formats V4L2Subdevice::formats(unsigned int pad)
return formats;
}
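+/*
+ * Infer the colour encoding from the media bus code and delegate the
+ * conversion to V4L2Device::toColorSpace().
+ */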
+std::optional<ColorSpace> V4L2Subdevice::toColorSpace(const v4l2_mbus_framefmt &format) const
+{
+ /*
+	 * Only image formats have a color space; for other formats (such as
+	 * metadata formats) the color space concept isn't applicable. V4L2
+ * subdev drivers return a colorspace set to V4L2_COLORSPACE_DEFAULT in
+ * that case (as well as for image formats when the driver hasn't
+ * bothered implementing color space support). Check the colorspace
+ * field here and return std::nullopt directly to avoid logging a
+ * warning.
+ */
+ if (format.colorspace == V4L2_COLORSPACE_DEFAULT)
+ return std::nullopt;
+
+ PixelFormatInfo::ColourEncoding colourEncoding;
+ auto iter = formatInfoMap.find(format.code);
+ if (iter != formatInfoMap.end()) {
+ colourEncoding = iter->second.colourEncoding;
+ } else {
+ LOG(V4L2, Warning)
+ << "Unknown subdev format "
+ << utils::hex(format.code, 4)
+ << ", defaulting to RGB encoding";
+
+ colourEncoding = PixelFormatInfo::ColourEncodingRGB;
+ }
+
+ return V4L2Device::toColorSpace(format, colourEncoding);
+}
+
/**
* \brief Retrieve the image format set on one of the V4L2 subdevice pads
* \param[in] pad The 0-indexed pad number the format is to be retrieved from
@@ -526,7 +560,13 @@ int V4L2Subdevice::setFormat(unsigned int pad, V4L2SubdeviceFormat *format,
subdevFmt.format.height = format->size.height;
subdevFmt.format.code = format->mbus_code;
subdevFmt.format.field = V4L2_FIELD_NONE;
- fromColorSpace(format->colorSpace, subdevFmt.format);
+ if (format->colorSpace) {
+ fromColorSpace(format->colorSpace, subdevFmt.format);
+
+ /* The CSC flag is only applicable to source pads. */
+ if (entity_->pads()[pad]->flags() & MEDIA_PAD_FL_SOURCE)
+ subdevFmt.format.flags |= V4L2_MBUS_FRAMEFMT_SET_CSC;
+ }
int ret = ioctl(VIDIOC_SUBDEV_S_FMT, &subdevFmt);
if (ret) {
diff --git a/src/libcamera/v4l2_videodevice.cpp b/src/libcamera/v4l2_videodevice.cpp
index b80ee1cd..955e1508 100644
--- a/src/libcamera/v4l2_videodevice.cpp
+++ b/src/libcamera/v4l2_videodevice.cpp
@@ -914,6 +914,13 @@ int V4L2VideoDevice::trySetFormatMeta(V4L2DeviceFormat *format, bool set)
return 0;
}
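+/*
+ * Look up the colour encoding of the V4L2 pixel format and delegate the
+ * conversion to V4L2Device::toColorSpace().
+ */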
+template<typename T>
+std::optional<ColorSpace> V4L2VideoDevice::toColorSpace(const T &v4l2Format)
+{
+ V4L2PixelFormat fourcc{ v4l2Format.pixelformat };
+ return V4L2Device::toColorSpace(v4l2Format, PixelFormatInfo::info(fourcc).colourEncoding);
+}
+
int V4L2VideoDevice::getFormatMultiplane(V4L2DeviceFormat *format)
{
struct v4l2_format v4l2Format = {};
@@ -953,7 +960,12 @@ int V4L2VideoDevice::trySetFormatMultiplane(V4L2DeviceFormat *format, bool set)
pix->pixelformat = format->fourcc;
pix->num_planes = format->planesCount;
pix->field = V4L2_FIELD_NONE;
- fromColorSpace(format->colorSpace, *pix);
+ if (format->colorSpace) {
+ fromColorSpace(format->colorSpace, *pix);
+
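+		/*
+		 * The V4L2_PIX_FMT_FLAG_SET_CSC flag asks the driver to apply
+		 * the requested colour space; it is only valid for video
+		 * capture devices.
+		 */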
+ if (caps_.isVideoCapture())
+ pix->flags |= V4L2_PIX_FMT_FLAG_SET_CSC;
+ }
ASSERT(pix->num_planes <= std::size(pix->plane_fmt));
@@ -1023,7 +1035,12 @@ int V4L2VideoDevice::trySetFormatSingleplane(V4L2DeviceFormat *format, bool set)
pix->pixelformat = format->fourcc;
pix->bytesperline = format->planes[0].bpl;
pix->field = V4L2_FIELD_NONE;
- fromColorSpace(format->colorSpace, *pix);
+ if (format->colorSpace) {
+ fromColorSpace(format->colorSpace, *pix);
+
+ if (caps_.isVideoCapture())
+ pix->flags |= V4L2_PIX_FMT_FLAG_SET_CSC;
+ }
ret = ioctl(set ? VIDIOC_S_FMT : VIDIOC_TRY_FMT, &v4l2Format);
if (ret) {
diff --git a/src/libcamera/yaml_parser.cpp b/src/libcamera/yaml_parser.cpp
index 9162e225..d8a7c2f9 100644
--- a/src/libcamera/yaml_parser.cpp
+++ b/src/libcamera/yaml_parser.cpp
@@ -131,111 +131,141 @@ std::optional<bool> YamlObject::get() const
return std::nullopt;
}
-template<>
-std::optional<int16_t> YamlObject::get() const
+namespace {
+
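+/*
+ * Parse a base-10 signed integer and check that it fits in the [min, max]
+ * range. On success, store the value in *result and return true.
+ */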
+bool parseSignedInteger(const std::string &str, long min, long max,
+ long *result)
{
- if (type_ != Type::Value)
- return std::nullopt;
+ if (str == "")
+ return false;
- if (value_ == "")
- return std::nullopt;
+ char *end;
+
+ errno = 0;
+ long value = std::strtol(str.c_str(), &end, 10);
+
+ if ('\0' != *end || errno == ERANGE || value < min || value > max)
+ return false;
+
+ *result = value;
+ return true;
+}
+
+bool parseUnsignedInteger(const std::string &str, unsigned long max,
+ unsigned long *result)
+{
+ if (str == "")
+ return false;
+
+ /*
+ * strtoul() accepts strings representing a negative number, in which
+ * case it negates the converted value. We don't want to silently accept
+ * negative values and return a large positive number, so check for a
+ * minus sign (after optional whitespace) and return an error.
+ */
+ std::size_t found = str.find_first_not_of(" \t");
+ if (found != std::string::npos && str[found] == '-')
+ return false;
char *end;
errno = 0;
- int16_t value = std::strtol(value_.c_str(), &end, 10);
+ unsigned long value = std::strtoul(str.c_str(), &end, 10);
- if ('\0' != *end || errno == ERANGE ||
- value < std::numeric_limits<int16_t>::min() ||
- value > std::numeric_limits<int16_t>::max())
- return std::nullopt;
+ if ('\0' != *end || errno == ERANGE || value > max)
+ return false;
- return value;
+ *result = value;
+ return true;
}
+} /* namespace */
+
template<>
-std::optional<uint16_t> YamlObject::get() const
+std::optional<int8_t> YamlObject::get() const
{
if (type_ != Type::Value)
return std::nullopt;
- if (value_ == "")
- return std::nullopt;
+ long value;
- /*
- * libyaml parses all scalar values as strings. When a string has
- * leading spaces before a minus sign, for example " -10", strtoul
- * skips leading spaces, accepts the leading minus sign, and the
- * calculated digits are negated as if by unary minus. Rule it out in
- * case the user gets a large number when the value is negative.
- */
- std::size_t found = value_.find_first_not_of(" \t");
- if (found != std::string::npos && value_[found] == '-')
+ if (!parseSignedInteger(value_, std::numeric_limits<int8_t>::min(),
+ std::numeric_limits<int8_t>::max(), &value))
return std::nullopt;
- char *end;
+ return value;
+}
- errno = 0;
- uint16_t value = std::strtoul(value_.c_str(), &end, 10);
+template<>
+std::optional<uint8_t> YamlObject::get() const
+{
+ if (type_ != Type::Value)
+ return std::nullopt;
+
+ unsigned long value;
- if ('\0' != *end || errno == ERANGE ||
- value < std::numeric_limits<uint16_t>::min() ||
- value > std::numeric_limits<uint16_t>::max())
+ if (!parseUnsignedInteger(value_, std::numeric_limits<uint8_t>::max(),
+ &value))
return std::nullopt;
return value;
}
template<>
-std::optional<int32_t> YamlObject::get() const
+std::optional<int16_t> YamlObject::get() const
{
if (type_ != Type::Value)
return std::nullopt;
- if (value_ == "")
+ long value;
+
+ if (!parseSignedInteger(value_, std::numeric_limits<int16_t>::min(),
+ std::numeric_limits<int16_t>::max(), &value))
return std::nullopt;
- char *end;
+ return value;
+}
- errno = 0;
- long value = std::strtol(value_.c_str(), &end, 10);
+template<>
+std::optional<uint16_t> YamlObject::get() const
+{
+ if (type_ != Type::Value)
+ return std::nullopt;
+
+ unsigned long value;
- if ('\0' != *end || errno == ERANGE ||
- value < std::numeric_limits<int32_t>::min() ||
- value > std::numeric_limits<int32_t>::max())
+ if (!parseUnsignedInteger(value_, std::numeric_limits<uint16_t>::max(),
+ &value))
return std::nullopt;
return value;
}
template<>
-std::optional<uint32_t> YamlObject::get() const
+std::optional<int32_t> YamlObject::get() const
{
if (type_ != Type::Value)
return std::nullopt;
- if (value_ == "")
- return std::nullopt;
+ long value;
- /*
- * libyaml parses all scalar values as strings. When a string has
- * leading spaces before a minus sign, for example " -10", strtoul
- * skips leading spaces, accepts the leading minus sign, and the
- * calculated digits are negated as if by unary minus. Rule it out in
- * case the user gets a large number when the value is negative.
- */
- std::size_t found = value_.find_first_not_of(" \t");
- if (found != std::string::npos && value_[found] == '-')
+ if (!parseSignedInteger(value_, std::numeric_limits<int32_t>::min(),
+ std::numeric_limits<int32_t>::max(), &value))
return std::nullopt;
- char *end;
+ return value;
+}
- errno = 0;
- unsigned long value = std::strtoul(value_.c_str(), &end, 10);
+template<>
+std::optional<uint32_t> YamlObject::get() const
+{
+ if (type_ != Type::Value)
+ return std::nullopt;
+
+ unsigned long value;
- if ('\0' != *end || errno == ERANGE ||
- value < std::numeric_limits<uint32_t>::min() ||
- value > std::numeric_limits<uint32_t>::max())
+ if (!parseUnsignedInteger(value_, std::numeric_limits<uint32_t>::max(),
+ &value))
return std::nullopt;
return value;
@@ -310,6 +340,8 @@ template<typename T,
std::enable_if_t<
std::is_same_v<bool, T> ||
std::is_same_v<double, T> ||
+ std::is_same_v<int8_t, T> ||
+ std::is_same_v<uint8_t, T> ||
std::is_same_v<int16_t, T> ||
std::is_same_v<uint16_t, T> ||
std::is_same_v<int32_t, T> ||
@@ -336,6 +368,8 @@ std::optional<std::vector<T>> YamlObject::getList() const
template std::optional<std::vector<bool>> YamlObject::getList<bool>() const;
template std::optional<std::vector<double>> YamlObject::getList<double>() const;
+template std::optional<std::vector<int8_t>> YamlObject::getList<int8_t>() const;
+template std::optional<std::vector<uint8_t>> YamlObject::getList<uint8_t>() const;
template std::optional<std::vector<int16_t>> YamlObject::getList<int16_t>() const;
template std::optional<std::vector<uint16_t>> YamlObject::getList<uint16_t>() const;
template std::optional<std::vector<int32_t>> YamlObject::getList<int32_t>() const;
@@ -807,7 +841,9 @@ std::unique_ptr<YamlObject> YamlParser::parse(File &file)
std::unique_ptr<YamlObject> root(new YamlObject());
if (context.parseContent(*root)) {
- LOG(YamlParser, Error) << "Failed to parse YAML content";
+ LOG(YamlParser, Error)
+ << "Failed to parse YAML content from "
+ << file.fileName();
return nullptr;
}
diff --git a/src/py/cam/cam.py b/src/py/cam/cam.py
index 2ae89fa8..2701d937 100755
--- a/src/py/cam/cam.py
+++ b/src/py/cam/cam.py
@@ -3,9 +3,6 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright (C) 2022, Tomi Valkeinen <tomi.valkeinen@ideasonboard.com>
-# \todo Convert ctx and state dicts to proper classes, and move relevant
-# functions to those classes.
-
from typing import Any
import argparse
import binascii
@@ -434,7 +431,10 @@ def main():
if args.info:
ctx.do_cmd_info()
- if args.capture:
+	# Filter out the contexts that are not marked for capture
+ contexts = [ctx for ctx in contexts if ctx.opt_capture > 0]
+
+ if contexts:
state = CaptureState(cm, contexts)
if args.renderer == 'null':
diff --git a/src/py/examples/simple-cam.py b/src/py/examples/simple-cam.py
index 2b81bb65..b3e97ca7 100755
--- a/src/py/examples/simple-cam.py
+++ b/src/py/examples/simple-cam.py
@@ -19,8 +19,9 @@ TIMEOUT_SEC = 3
def handle_camera_event(cm):
- # cm.get_ready_requests() will not block here, as we know there is an event
- # to read.
+	# cm.get_ready_requests() returns the ready requests. In our case it
+	# should almost always return a single Request, but in some cases there
+	# could be multiple or none.
reqs = cm.get_ready_requests()
diff --git a/src/py/examples/simple-capture.py b/src/py/examples/simple-capture.py
index a6a9b33e..5f93574f 100755
--- a/src/py/examples/simple-capture.py
+++ b/src/py/examples/simple-capture.py
@@ -14,6 +14,7 @@
import argparse
import libcamera as libcam
+import selectors
import sys
# Number of frames to capture
@@ -107,11 +108,18 @@ def main():
# The main loop. Wait for the queued Requests to complete, process them,
# and re-queue them again.
+ sel = selectors.DefaultSelector()
+ sel.register(cm.event_fd, selectors.EVENT_READ)
+
while frames_done < TOTAL_FRAMES:
- # cm.get_ready_requests() blocks until there is an event and returns
- # all the ready requests. Here we should almost always get a single
+ # cm.get_ready_requests() does not block, so we use a Selector to wait
+ # for a camera event. Here we should almost always get a single
# Request, but in some cases there could be multiple or none.
+ events = sel.select()
+ if not events:
+ continue
+
reqs = cm.get_ready_requests()
for req in reqs:
diff --git a/src/py/examples/simple-continuous-capture.py b/src/py/examples/simple-continuous-capture.py
index fe78a2dd..26a8060b 100755
--- a/src/py/examples/simple-continuous-capture.py
+++ b/src/py/examples/simple-continuous-capture.py
@@ -88,8 +88,9 @@ class CaptureContext:
camera_contexts: list[CameraCaptureContext] = []
def handle_camera_event(self):
- # cm.get_ready_requests() will not block here, as we know there is an event
- # to read.
+	# cm.get_ready_requests() returns the ready requests. In our case it
+	# should almost always return a single Request, but in some cases there
+	# could be multiple or none.
reqs = self.cm.get_ready_requests()
diff --git a/src/py/libcamera/meson.build b/src/py/libcamera/meson.build
index eb884538..af19ffdd 100644
--- a/src/py/libcamera/meson.build
+++ b/src/py/libcamera/meson.build
@@ -13,8 +13,10 @@ pybind11_proj = subproject('pybind11')
pybind11_dep = pybind11_proj.get_variable('pybind11_dep')
pycamera_sources = files([
+ 'py_camera_manager.cpp',
'py_enums.cpp',
'py_geometry.cpp',
+ 'py_helpers.cpp',
'py_main.cpp',
])
@@ -59,7 +61,7 @@ pycamera_sources += custom_target('py_gen_formats',
command : [gen_py_formats, '-o', '@OUTPUT@', '@INPUT@'])
pycamera_deps = [
- libcamera_public,
+ libcamera_private,
py3_dep,
pybind11_dep,
]
@@ -68,7 +70,6 @@ pycamera_args = [
'-fvisibility=hidden',
'-Wno-shadow',
'-DPYBIND11_USE_SMART_HOLDER_AS_DEFAULT',
- '-DLIBCAMERA_BASE_PRIVATE',
]
destdir = get_option('libdir') / ('python' + py3_dep.version()) / 'site-packages' / 'libcamera'
diff --git a/src/py/libcamera/py_camera_manager.cpp b/src/py/libcamera/py_camera_manager.cpp
new file mode 100644
index 00000000..9ccb7aad
--- /dev/null
+++ b/src/py/libcamera/py_camera_manager.cpp
@@ -0,0 +1,131 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2022, Tomi Valkeinen <tomi.valkeinen@ideasonboard.com>
+ */
+
+#include "py_camera_manager.h"
+
+#include <errno.h>
+#include <memory>
+#include <sys/eventfd.h>
+#include <system_error>
+#include <unistd.h>
+#include <vector>
+
+#include "py_main.h"
+
+namespace py = pybind11;
+
+using namespace libcamera;
+
+PyCameraManager::PyCameraManager()
+{
+ LOG(Python, Debug) << "PyCameraManager()";
+
+ cameraManager_ = std::make_unique<CameraManager>();
+
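+	/*
+	 * Create the eventfd that handleRequestCompleted() signals to wake
+	 * up users polling event_fd.
+	 */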
+ int fd = eventfd(0, EFD_CLOEXEC | EFD_NONBLOCK);
+ if (fd == -1)
+ throw std::system_error(errno, std::generic_category(),
+ "Failed to create eventfd");
+
+ eventFd_ = UniqueFD(fd);
+
+ int ret = cameraManager_->start();
+ if (ret)
+ throw std::system_error(-ret, std::generic_category(),
+ "Failed to start CameraManager");
+}
+
+PyCameraManager::~PyCameraManager()
+{
+ LOG(Python, Debug) << "~PyCameraManager()";
+}
+
+py::list PyCameraManager::cameras()
+{
+ /*
+ * Create a list of Cameras, where each camera has a keep-alive to
+ * CameraManager.
+ */
+ py::list l;
+
+ for (auto &camera : cameraManager_->cameras()) {
+ py::object py_cm = py::cast(this);
+ py::object py_cam = py::cast(camera);
+ py::detail::keep_alive_impl(py_cam, py_cm);
+ l.append(py_cam);
+ }
+
+ return l;
+}
+
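+/*
+ * Return the completed requests as Python objects, or an empty vector if
+ * no completion event was pending.
+ */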
+std::vector<py::object> PyCameraManager::getReadyRequests()
+{
+ int ret = readFd();
+
+ if (ret == -EAGAIN)
+ return std::vector<py::object>();
+
+ if (ret != 0)
+ throw std::system_error(-ret, std::generic_category());
+
+ std::vector<py::object> py_reqs;
+
+ for (Request *request : getCompletedRequests()) {
+ py::object o = py::cast(request);
+ /* Decrease the ref increased in Camera.queue_request() */
+ o.dec_ref();
+ py_reqs.push_back(o);
+ }
+
+ return py_reqs;
+}
+
+/* Note: Called from another thread */
+void PyCameraManager::handleRequestCompleted(Request *req)
+{
+ pushRequest(req);
+ writeFd();
+}
+
+void PyCameraManager::writeFd()
+{
+ uint64_t v = 1;
+
+ size_t s = write(eventFd_.get(), &v, 8);
+ /*
+ * We should never fail, and have no simple means to manage the error,
+ * so let's log a fatal error.
+ */
+ if (s != 8)
+ LOG(Python, Fatal) << "Unable to write to eventfd";
+}
+
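+/*
+ * Clear the pending event. Returns 0 on success and -EAGAIN if no event
+ * was pending, as the eventfd is non-blocking.
+ */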
+int PyCameraManager::readFd()
+{
+ uint8_t buf[8];
+
+ ssize_t ret = read(eventFd_.get(), buf, 8);
+
+ if (ret == 8)
+ return 0;
+ else if (ret < 0)
+ return -errno;
+ else
+ return -EIO;
+}
+
+void PyCameraManager::pushRequest(Request *req)
+{
+ MutexLocker guard(completedRequestsMutex_);
+ completedRequests_.push_back(req);
+}
+
+std::vector<Request *> PyCameraManager::getCompletedRequests()
+{
+ std::vector<Request *> v;
+ MutexLocker guard(completedRequestsMutex_);
+ swap(v, completedRequests_);
+ return v;
+}
diff --git a/src/py/libcamera/py_camera_manager.h b/src/py/libcamera/py_camera_manager.h
new file mode 100644
index 00000000..3525057d
--- /dev/null
+++ b/src/py/libcamera/py_camera_manager.h
@@ -0,0 +1,45 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2022, Tomi Valkeinen <tomi.valkeinen@ideasonboard.com>
+ */
+
+#pragma once
+
+#include <libcamera/base/mutex.h>
+
+#include <libcamera/libcamera.h>
+
+#include <pybind11/smart_holder.h>
+
+using namespace libcamera;
+
+class PyCameraManager
+{
+public:
+ PyCameraManager();
+ ~PyCameraManager();
+
+ pybind11::list cameras();
+ std::shared_ptr<Camera> get(const std::string &name) { return cameraManager_->get(name); }
+
+ static const std::string &version() { return CameraManager::version(); }
+
+ int eventFd() const { return eventFd_.get(); }
+
+ std::vector<pybind11::object> getReadyRequests();
+
+ void handleRequestCompleted(Request *req);
+
+private:
+ std::unique_ptr<CameraManager> cameraManager_;
+
+ UniqueFD eventFd_;
+ libcamera::Mutex completedRequestsMutex_;
+ std::vector<Request *> completedRequests_
+ LIBCAMERA_TSA_GUARDED_BY(completedRequestsMutex_);
+
+ void writeFd();
+ int readFd();
+ void pushRequest(Request *req);
+ std::vector<Request *> getCompletedRequests();
+};
diff --git a/src/py/libcamera/py_helpers.cpp b/src/py/libcamera/py_helpers.cpp
new file mode 100644
index 00000000..45aecce9
--- /dev/null
+++ b/src/py/libcamera/py_helpers.cpp
@@ -0,0 +1,97 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2022, Tomi Valkeinen <tomi.valkeinen@ideasonboard.com>
+ */
+
+#include "py_helpers.h"
+
+#include <libcamera/libcamera.h>
+
+#include <pybind11/functional.h>
+#include <pybind11/stl.h>
+#include <pybind11/stl_bind.h>
+
+namespace py = pybind11;
+
+using namespace libcamera;
+
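+/*
+ * Convert a ControlValue to a Python object, returning a tuple for array
+ * values and a plain scalar otherwise.
+ */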
+template<typename T>
+static py::object valueOrTuple(const ControlValue &cv)
+{
+ if (cv.isArray()) {
+ const T *v = reinterpret_cast<const T *>(cv.data().data());
+ auto t = py::tuple(cv.numElements());
+
+ for (size_t i = 0; i < cv.numElements(); ++i)
+ t[i] = v[i];
+
+ return std::move(t);
+ }
+
+ return py::cast(cv.get<T>());
+}
+
+py::object controlValueToPy(const ControlValue &cv)
+{
+ switch (cv.type()) {
+ case ControlTypeBool:
+ return valueOrTuple<bool>(cv);
+ case ControlTypeByte:
+ return valueOrTuple<uint8_t>(cv);
+ case ControlTypeInteger32:
+ return valueOrTuple<int32_t>(cv);
+ case ControlTypeInteger64:
+ return valueOrTuple<int64_t>(cv);
+ case ControlTypeFloat:
+ return valueOrTuple<float>(cv);
+ case ControlTypeString:
+ return py::cast(cv.get<std::string>());
+ case ControlTypeRectangle: {
+ const Rectangle *v = reinterpret_cast<const Rectangle *>(cv.data().data());
+ return py::cast(v);
+ }
+ case ControlTypeSize: {
+ const Size *v = reinterpret_cast<const Size *>(cv.data().data());
+ return py::cast(v);
+ }
+ case ControlTypeNone:
+ default:
+ throw std::runtime_error("Unsupported ControlValue type");
+ }
+}
+
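+/*
+ * Convert a Python list or tuple to an array ControlValue, and any other
+ * object to a scalar ControlValue.
+ */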
+template<typename T>
+static ControlValue controlValueMaybeArray(const py::object &ob)
+{
+ if (py::isinstance<py::list>(ob) || py::isinstance<py::tuple>(ob)) {
+ std::vector<T> vec = ob.cast<std::vector<T>>();
+ return ControlValue(Span<const T>(vec));
+ }
+
+ return ControlValue(ob.cast<T>());
+}
+
+ControlValue pyToControlValue(const py::object &ob, ControlType type)
+{
+ switch (type) {
+ case ControlTypeBool:
+ return ControlValue(ob.cast<bool>());
+ case ControlTypeByte:
+ return controlValueMaybeArray<uint8_t>(ob);
+ case ControlTypeInteger32:
+ return controlValueMaybeArray<int32_t>(ob);
+ case ControlTypeInteger64:
+ return controlValueMaybeArray<int64_t>(ob);
+ case ControlTypeFloat:
+ return controlValueMaybeArray<float>(ob);
+ case ControlTypeString:
+ return ControlValue(ob.cast<std::string>());
+ case ControlTypeRectangle:
+ return ControlValue(ob.cast<Rectangle>());
+ case ControlTypeSize:
+ return ControlValue(ob.cast<Size>());
+ case ControlTypeNone:
+ default:
+ throw std::runtime_error("Control type not implemented");
+ }
+}
diff --git a/src/py/libcamera/py_helpers.h b/src/py/libcamera/py_helpers.h
new file mode 100644
index 00000000..cd31e2cc
--- /dev/null
+++ b/src/py/libcamera/py_helpers.h
@@ -0,0 +1,13 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2022, Tomi Valkeinen <tomi.valkeinen@ideasonboard.com>
+ */
+
+#pragma once
+
+#include <libcamera/libcamera.h>
+
+#include <pybind11/smart_holder.h>
+
+pybind11::object controlValueToPy(const libcamera::ControlValue &cv);
+libcamera::ControlValue pyToControlValue(const pybind11::object &ob, libcamera::ControlType type);
diff --git a/src/py/libcamera/py_main.cpp b/src/py/libcamera/py_main.cpp
index 505cc3dc..75947889 100644
--- a/src/py/libcamera/py_main.cpp
+++ b/src/py/libcamera/py_main.cpp
@@ -5,10 +5,12 @@
* Python bindings
*/
-#include <mutex>
+#include "py_main.h"
+
+#include <memory>
#include <stdexcept>
-#include <sys/eventfd.h>
-#include <unistd.h>
+#include <string>
+#include <vector>
#include <libcamera/base/log.h>
@@ -19,112 +21,24 @@
#include <pybind11/stl.h>
#include <pybind11/stl_bind.h>
+#include "py_camera_manager.h"
+#include "py_helpers.h"
+
namespace py = pybind11;
using namespace libcamera;
-template<typename T>
-static py::object valueOrTuple(const ControlValue &cv)
-{
- if (cv.isArray()) {
- const T *v = reinterpret_cast<const T *>(cv.data().data());
- auto t = py::tuple(cv.numElements());
-
- for (size_t i = 0; i < cv.numElements(); ++i)
- t[i] = v[i];
-
- return std::move(t);
- }
-
- return py::cast(cv.get<T>());
-}
-
-static py::object controlValueToPy(const ControlValue &cv)
-{
- switch (cv.type()) {
- case ControlTypeBool:
- return valueOrTuple<bool>(cv);
- case ControlTypeByte:
- return valueOrTuple<uint8_t>(cv);
- case ControlTypeInteger32:
- return valueOrTuple<int32_t>(cv);
- case ControlTypeInteger64:
- return valueOrTuple<int64_t>(cv);
- case ControlTypeFloat:
- return valueOrTuple<float>(cv);
- case ControlTypeString:
- return py::cast(cv.get<std::string>());
- case ControlTypeRectangle: {
- const Rectangle *v = reinterpret_cast<const Rectangle *>(cv.data().data());
- return py::cast(v);
- }
- case ControlTypeSize: {
- const Size *v = reinterpret_cast<const Size *>(cv.data().data());
- return py::cast(v);
- }
- case ControlTypeNone:
- default:
- throw std::runtime_error("Unsupported ControlValue type");
- }
-}
-
-template<typename T>
-static ControlValue controlValueMaybeArray(const py::object &ob)
-{
- if (py::isinstance<py::list>(ob) || py::isinstance<py::tuple>(ob)) {
- std::vector<T> vec = ob.cast<std::vector<T>>();
- return ControlValue(Span<const T>(vec));
- }
+namespace libcamera {
- return ControlValue(ob.cast<T>());
-}
+LOG_DEFINE_CATEGORY(Python)
-static ControlValue pyToControlValue(const py::object &ob, ControlType type)
-{
- switch (type) {
- case ControlTypeBool:
- return ControlValue(ob.cast<bool>());
- case ControlTypeByte:
- return controlValueMaybeArray<uint8_t>(ob);
- case ControlTypeInteger32:
- return controlValueMaybeArray<int32_t>(ob);
- case ControlTypeInteger64:
- return controlValueMaybeArray<int64_t>(ob);
- case ControlTypeFloat:
- return controlValueMaybeArray<float>(ob);
- case ControlTypeString:
- return ControlValue(ob.cast<std::string>());
- case ControlTypeRectangle:
- return ControlValue(ob.cast<Rectangle>());
- case ControlTypeSize:
- return ControlValue(ob.cast<Size>());
- case ControlTypeNone:
- default:
- throw std::runtime_error("Control type not implemented");
- }
}
-static std::weak_ptr<CameraManager> gCameraManager;
-static int gEventfd;
-static std::mutex gReqlistMutex;
-static std::vector<Request *> gReqList;
-
-static void handleRequestCompleted(Request *req)
-{
- {
- std::lock_guard guard(gReqlistMutex);
- gReqList.push_back(req);
- }
-
- uint64_t v = 1;
- size_t s = write(gEventfd, &v, 8);
- /*
- * We should never fail, and have no simple means to manage the error,
- * so let's use LOG(Fatal).
- */
- if (s != 8)
- LOG(Fatal) << "Unable to write to eventfd";
-}
+/*
+ * Note: global C++ destructors can be run on this before the py module is
+ * destroyed.
+ */
+static std::weak_ptr<PyCameraManager> gCameraManager;
void init_py_enums(py::module &m);
void init_py_controls_generated(py::module &m);
@@ -147,7 +61,7 @@ PYBIND11_MODULE(_libcamera, m)
* https://pybind11.readthedocs.io/en/latest/advanced/misc.html#avoiding-c-types-in-docstrings
*/
- auto pyCameraManager = py::class_<CameraManager>(m, "CameraManager");
+ auto pyCameraManager = py::class_<PyCameraManager>(m, "CameraManager");
auto pyCamera = py::class_<Camera>(m, "Camera");
auto pyCameraConfiguration = py::class_<CameraConfiguration>(m, "CameraConfiguration");
auto pyCameraConfigurationStatus = py::enum_<CameraConfiguration::Status>(pyCameraConfiguration, "Status");
@@ -181,78 +95,22 @@ PYBIND11_MODULE(_libcamera, m)
/* Classes */
pyCameraManager
.def_static("singleton", []() {
- std::shared_ptr<CameraManager> cm = gCameraManager.lock();
- if (cm)
- return cm;
-
- int fd = eventfd(0, 0);
- if (fd == -1)
- throw std::system_error(errno, std::generic_category(),
- "Failed to create eventfd");
-
- cm = std::shared_ptr<CameraManager>(new CameraManager, [](auto p) {
- close(gEventfd);
- gEventfd = -1;
- delete p;
- });
+ std::shared_ptr<PyCameraManager> cm = gCameraManager.lock();
- gEventfd = fd;
- gCameraManager = cm;
-
- int ret = cm->start();
- if (ret)
- throw std::system_error(-ret, std::generic_category(),
- "Failed to start CameraManager");
-
- return cm;
- })
-
- .def_property_readonly("version", &CameraManager::version)
-
- .def_property_readonly("event_fd", [](CameraManager &) {
- return gEventfd;
- })
-
- .def("get_ready_requests", [](CameraManager &) {
- uint8_t buf[8];
-
- if (read(gEventfd, buf, 8) != 8)
- throw std::system_error(errno, std::generic_category());
-
- std::vector<Request *> v;
-
- {
- std::lock_guard guard(gReqlistMutex);
- swap(v, gReqList);
+ if (!cm) {
+ cm = std::make_shared<PyCameraManager>();
+ gCameraManager = cm;
}
- std::vector<py::object> ret;
-
- for (Request *req : v) {
- py::object o = py::cast(req);
- /* Decrease the ref increased in Camera.queue_request() */
- o.dec_ref();
- ret.push_back(o);
- }
-
- return ret;
+ return cm;
})
- .def("get", py::overload_cast<const std::string &>(&CameraManager::get), py::keep_alive<0, 1>())
-
- /* Create a list of Cameras, where each camera has a keep-alive to CameraManager */
- .def_property_readonly("cameras", [](CameraManager &self) {
- py::list l;
-
- for (auto &c : self.cameras()) {
- py::object py_cm = py::cast(self);
- py::object py_cam = py::cast(c);
- py::detail::keep_alive_impl(py_cam, py_cm);
- l.append(py_cam);
- }
+ .def_property_readonly("version", &PyCameraManager::version)
+ .def("get", &PyCameraManager::get, py::keep_alive<0, 1>())
+ .def_property_readonly("cameras", &PyCameraManager::cameras)
- return l;
- });
+ .def_property_readonly("event_fd", &PyCameraManager::eventFd)
+ .def("get_ready_requests", &PyCameraManager::getReadyRequests);
pyCamera
.def_property_readonly("id", &Camera::id)
@@ -262,7 +120,10 @@ PYBIND11_MODULE(_libcamera, m)
const std::unordered_map<const ControlId *, py::object> &controls) {
/* \todo What happens if someone calls start() multiple times? */
- self.requestCompleted.connect(handleRequestCompleted);
+ auto cm = gCameraManager.lock();
+ ASSERT(cm);
+
+ self.requestCompleted.connect(cm.get(), &PyCameraManager::handleRequestCompleted);
ControlList controlList(self.controls());
@@ -273,7 +134,7 @@ PYBIND11_MODULE(_libcamera, m)
int ret = self.start(&controlList);
if (ret) {
- self.requestCompleted.disconnect(handleRequestCompleted);
+ self.requestCompleted.disconnect();
return ret;
}
@@ -285,7 +146,7 @@ PYBIND11_MODULE(_libcamera, m)
if (ret)
return ret;
- self.requestCompleted.disconnect(handleRequestCompleted);
+ self.requestCompleted.disconnect();
return 0;
})
@@ -602,8 +463,8 @@ PYBIND11_MODULE(_libcamera, m)
.def_readwrite("ycbcrEncoding", &ColorSpace::ycbcrEncoding)
.def_readwrite("range", &ColorSpace::range)
.def_static("Raw", []() { return ColorSpace::Raw; })
- .def_static("Jpeg", []() { return ColorSpace::Jpeg; })
.def_static("Srgb", []() { return ColorSpace::Srgb; })
+ .def_static("Sycc", []() { return ColorSpace::Sycc; })
.def_static("Smpte170m", []() { return ColorSpace::Smpte170m; })
.def_static("Rec709", []() { return ColorSpace::Rec709; })
.def_static("Rec2020", []() { return ColorSpace::Rec2020; });
diff --git a/src/py/libcamera/py_main.h b/src/py/libcamera/py_main.h
new file mode 100644
index 00000000..5bb5f2d1
--- /dev/null
+++ b/src/py/libcamera/py_main.h
@@ -0,0 +1,14 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2022, Tomi Valkeinen <tomi.valkeinen@ideasonboard.com>
+ */
+
+#pragma once
+
+#include <libcamera/base/log.h>
+
+namespace libcamera {
+
+LOG_DECLARE_CATEGORY(Python)
+
+}
diff --git a/src/qcam/assets/shader/YUV_2_planes.frag b/src/qcam/assets/shader/YUV_2_planes.frag
index 254463c0..da8dbcc5 100644
--- a/src/qcam/assets/shader/YUV_2_planes.frag
+++ b/src/qcam/assets/shader/YUV_2_planes.frag
@@ -13,27 +13,30 @@ varying vec2 textureOut;
uniform sampler2D tex_y;
uniform sampler2D tex_u;
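+/*
+ * YUV2RGB_MATRIX and YUV2RGB_Y_OFFSET are defined at shader compile time
+ * based on the stream colour space. The offsets are expressed in 8-bit
+ * units, normalised to the [0, 1] sampler range.
+ */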
+const mat3 yuv2rgb_matrix = mat3(
+ YUV2RGB_MATRIX
+);
+
+const vec3 yuv2rgb_offset = vec3(
+ YUV2RGB_Y_OFFSET / 255.0, 128.0 / 255.0, 128.0 / 255.0
+);
+
void main(void)
{
vec3 yuv;
- vec3 rgb;
- mat3 yuv2rgb_bt601_mat = mat3(
- vec3(1.164, 1.164, 1.164),
- vec3(0.000, -0.392, 2.017),
- vec3(1.596, -0.813, 0.000)
- );
-
- yuv.x = texture2D(tex_y, textureOut).r - 0.063;
+
+ yuv.x = texture2D(tex_y, textureOut).r;
#if defined(YUV_PATTERN_UV)
- yuv.y = texture2D(tex_u, textureOut).r - 0.500;
- yuv.z = texture2D(tex_u, textureOut).a - 0.500;
+ yuv.y = texture2D(tex_u, textureOut).r;
+ yuv.z = texture2D(tex_u, textureOut).a;
#elif defined(YUV_PATTERN_VU)
- yuv.y = texture2D(tex_u, textureOut).a - 0.500;
- yuv.z = texture2D(tex_u, textureOut).r - 0.500;
+ yuv.y = texture2D(tex_u, textureOut).a;
+ yuv.z = texture2D(tex_u, textureOut).r;
#else
#error Invalid pattern
#endif
- rgb = yuv2rgb_bt601_mat * yuv;
+	vec3 rgb = yuv2rgb_matrix * (yuv - yuv2rgb_offset);
+
gl_FragColor = vec4(rgb, 1.0);
}
diff --git a/src/qcam/assets/shader/YUV_3_planes.frag b/src/qcam/assets/shader/YUV_3_planes.frag
index 2be74b5d..e754129d 100644
--- a/src/qcam/assets/shader/YUV_3_planes.frag
+++ b/src/qcam/assets/shader/YUV_3_planes.frag
@@ -14,20 +14,23 @@ uniform sampler2D tex_y;
uniform sampler2D tex_u;
uniform sampler2D tex_v;
+const mat3 yuv2rgb_matrix = mat3(
+ YUV2RGB_MATRIX
+);
+
+const vec3 yuv2rgb_offset = vec3(
+ YUV2RGB_Y_OFFSET / 255.0, 128.0 / 255.0, 128.0 / 255.0
+);
+
void main(void)
{
vec3 yuv;
- vec3 rgb;
- mat3 yuv2rgb_bt601_mat = mat3(
- vec3(1.164, 1.164, 1.164),
- vec3(0.000, -0.392, 2.017),
- vec3(1.596, -0.813, 0.000)
- );
-
- yuv.x = texture2D(tex_y, textureOut).r - 0.063;
- yuv.y = texture2D(tex_u, textureOut).r - 0.500;
- yuv.z = texture2D(tex_v, textureOut).r - 0.500;
-
- rgb = yuv2rgb_bt601_mat * yuv;
+
+ yuv.x = texture2D(tex_y, textureOut).r;
+ yuv.y = texture2D(tex_u, textureOut).r;
+ yuv.z = texture2D(tex_v, textureOut).r;
+
+	vec3 rgb = yuv2rgb_matrix * (yuv - yuv2rgb_offset);
+
gl_FragColor = vec4(rgb, 1.0);
}
diff --git a/src/qcam/assets/shader/YUV_packed.frag b/src/qcam/assets/shader/YUV_packed.frag
index d6efd4ce..b9ef9d41 100644
--- a/src/qcam/assets/shader/YUV_packed.frag
+++ b/src/qcam/assets/shader/YUV_packed.frag
@@ -14,15 +14,16 @@ varying vec2 textureOut;
uniform sampler2D tex_y;
uniform vec2 tex_step;
+const mat3 yuv2rgb_matrix = mat3(
+ YUV2RGB_MATRIX
+);
+
+const vec3 yuv2rgb_offset = vec3(
+ YUV2RGB_Y_OFFSET / 255.0, 128.0 / 255.0, 128.0 / 255.0
+);
+
void main(void)
{
- mat3 yuv2rgb_bt601_mat = mat3(
- vec3(1.164, 1.164, 1.164),
- vec3(0.000, -0.392, 2.017),
- vec3(1.596, -0.813, 0.000)
- );
- vec3 yuv2rgb_bt601_offset = vec3(0.063, 0.500, 0.500);
-
/*
* The sampler won't interpolate the texture correctly along the X axis,
* as each RGBA pixel effectively stores two pixels. We thus need to
@@ -76,7 +77,7 @@ void main(void)
float y = mix(y_left, y_right, step(0.5, f_x));
- vec3 rgb = yuv2rgb_bt601_mat * (vec3(y, uv) - yuv2rgb_bt601_offset);
+ vec3 rgb = yuv2rgb_matrix * (vec3(y, uv) - yuv2rgb_offset);
gl_FragColor = vec4(rgb, 1.0);
}
diff --git a/src/qcam/cam_select_dialog.cpp b/src/qcam/cam_select_dialog.cpp
new file mode 100644
index 00000000..3c8b12a9
--- /dev/null
+++ b/src/qcam/cam_select_dialog.cpp
@@ -0,0 +1,111 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2022, Utkarsh Tiwari <utkarsh02t@gmail.com>
+ *
+ * cam_select_dialog.cpp - qcam - Camera Selection dialog
+ */
+
+#include "cam_select_dialog.h"
+
+#include <memory>
+
+#include <libcamera/camera.h>
+#include <libcamera/camera_manager.h>
+
+#include <QComboBox>
+#include <QDialogButtonBox>
+#include <QFormLayout>
+#include <QLabel>
+#include <QString>
+
+CameraSelectorDialog::CameraSelectorDialog(libcamera::CameraManager *cameraManager,
+ QWidget *parent)
+ : QDialog(parent), cm_(cameraManager)
+{
+ /* Use a QFormLayout for the dialog. */
+ QFormLayout *layout = new QFormLayout(this);
+
+	/* Set up the camera id combo-box. */
+ cameraIdComboBox_ = new QComboBox;
+ for (const auto &cam : cm_->cameras())
+ cameraIdComboBox_->addItem(QString::fromStdString(cam->id()));
+
+ /* Set camera information labels. */
+ cameraLocation_ = new QLabel;
+ cameraModel_ = new QLabel;
+
+ updateCameraInfo(cameraIdComboBox_->currentText());
+ connect(cameraIdComboBox_, &QComboBox::currentTextChanged,
+ this, &CameraSelectorDialog::updateCameraInfo);
+
+	/* Set up the QDialogButtonBox. */
+ QDialogButtonBox *buttonBox =
+ new QDialogButtonBox(QDialogButtonBox::Ok |
+ QDialogButtonBox::Cancel);
+
+ connect(buttonBox, &QDialogButtonBox::accepted,
+ this, &QDialog::accept);
+ connect(buttonBox, &QDialogButtonBox::rejected,
+ this, &QDialog::reject);
+
+ /* Set the layout. */
+ layout->addRow("Camera:", cameraIdComboBox_);
+ layout->addRow("Location:", cameraLocation_);
+ layout->addRow("Model:", cameraModel_);
+ layout->addWidget(buttonBox);
+}
+
+CameraSelectorDialog::~CameraSelectorDialog() = default;
+
+std::string CameraSelectorDialog::getCameraId()
+{
+ return cameraIdComboBox_->currentText().toStdString();
+}
+
+/* Hotplug / Unplug Support. */
+void CameraSelectorDialog::addCamera(QString cameraId)
+{
+ cameraIdComboBox_->addItem(cameraId);
+}
+
+void CameraSelectorDialog::removeCamera(QString cameraId)
+{
+ int cameraIndex = cameraIdComboBox_->findText(cameraId);
+ cameraIdComboBox_->removeItem(cameraIndex);
+}
+
+/* Camera Information */
+void CameraSelectorDialog::updateCameraInfo(QString cameraId)
+{
+ const std::shared_ptr<libcamera::Camera> &camera =
+ cm_->get(cameraId.toStdString());
+
+ if (!camera)
+ return;
+
+ const libcamera::ControlList &properties = camera->properties();
+
+ const auto &location = properties.get(libcamera::properties::Location);
+ if (location) {
+ switch (*location) {
+ case libcamera::properties::CameraLocationFront:
+ cameraLocation_->setText("Internal front camera");
+ break;
+ case libcamera::properties::CameraLocationBack:
+ cameraLocation_->setText("Internal back camera");
+ break;
+ case libcamera::properties::CameraLocationExternal:
+ cameraLocation_->setText("External camera");
+ break;
+ default:
+ cameraLocation_->setText("Unknown");
+ }
+ } else {
+ cameraLocation_->setText("Unknown");
+ }
+
+ const auto &model = properties.get(libcamera::properties::Model)
+ .value_or("Unknown");
+
+ cameraModel_->setText(QString::fromStdString(model));
+}
diff --git a/src/qcam/cam_select_dialog.h b/src/qcam/cam_select_dialog.h
new file mode 100644
index 00000000..0b7709ed
--- /dev/null
+++ b/src/qcam/cam_select_dialog.h
@@ -0,0 +1,47 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2022, Utkarsh Tiwari <utkarsh02t@gmail.com>
+ *
+ * cam_select_dialog.h - qcam - Camera Selection dialog
+ */
+
+#pragma once
+
+#include <string>
+
+#include <libcamera/camera.h>
+#include <libcamera/camera_manager.h>
+#include <libcamera/controls.h>
+#include <libcamera/property_ids.h>
+
+#include <QDialog>
+#include <QString>
+
+class QComboBox;
+class QLabel;
+
+class CameraSelectorDialog : public QDialog
+{
+ Q_OBJECT
+public:
+ CameraSelectorDialog(libcamera::CameraManager *cameraManager,
+ QWidget *parent);
+ ~CameraSelectorDialog();
+
+ std::string getCameraId();
+
+ /* Hotplug / Unplug Support. */
+ void addCamera(QString cameraId);
+ void removeCamera(QString cameraId);
+
+ /* Camera Information */
+ void updateCameraInfo(QString cameraId);
+
+private:
+ libcamera::CameraManager *cm_;
+
+ /* UI elements. */
+ QComboBox *cameraIdComboBox_;
+ QLabel *cameraLocation_;
+ QLabel *cameraModel_;
+};
diff --git a/src/qcam/main_window.cpp b/src/qcam/main_window.cpp
index 7433d647..e0e5092e 100644
--- a/src/qcam/main_window.cpp
+++ b/src/qcam/main_window.cpp
@@ -14,12 +14,10 @@
#include <libcamera/camera_manager.h>
#include <libcamera/version.h>
-#include <QComboBox>
#include <QCoreApplication>
#include <QFileDialog>
#include <QImage>
#include <QImageWriter>
-#include <QInputDialog>
#include <QMutexLocker>
#include <QStandardPaths>
#include <QStringList>
@@ -30,6 +28,7 @@
#include "../cam/image.h"
+#include "cam_select_dialog.h"
#include "dng_writer.h"
#ifndef QT_NO_OPENGL
#include "viewfinder_gl.h"
@@ -120,14 +119,14 @@ MainWindow::MainWindow(CameraManager *cm, const OptionsParser::Options &options)
if (renderType == "qt") {
ViewFinderQt *viewfinder = new ViewFinderQt(this);
connect(viewfinder, &ViewFinderQt::renderComplete,
- this, &MainWindow::queueRequest);
+ this, &MainWindow::renderComplete);
viewfinder_ = viewfinder;
setCentralWidget(viewfinder);
#ifndef QT_NO_OPENGL
} else if (renderType == "gles") {
ViewFinderGL *viewfinder = new ViewFinderGL(this);
connect(viewfinder, &ViewFinderGL::renderComplete,
- this, &MainWindow::queueRequest);
+ this, &MainWindow::renderComplete);
viewfinder_ = viewfinder;
setCentralWidget(viewfinder);
#endif
@@ -144,6 +143,8 @@ MainWindow::MainWindow(CameraManager *cm, const OptionsParser::Options &options)
cm_->cameraAdded.connect(this, &MainWindow::addCamera);
cm_->cameraRemoved.connect(this, &MainWindow::removeCamera);
+ cameraSelectorDialog_ = new CameraSelectorDialog(cm_, this);
+
/* Open the camera and start capture. */
ret = openCamera();
if (ret < 0) {
@@ -193,14 +194,11 @@ int MainWindow::createToolbars()
connect(action, &QAction::triggered, this, &MainWindow::quit);
/* Camera selector. */
- cameraCombo_ = new QComboBox();
- connect(cameraCombo_, QOverload<int>::of(&QComboBox::activated),
+ cameraSelectButton_ = new QPushButton;
+ connect(cameraSelectButton_, &QPushButton::clicked,
this, &MainWindow::switchCamera);
- for (const std::shared_ptr<Camera> &cam : cm_->cameras())
- cameraCombo_->addItem(QString::fromStdString(cam->id()));
-
- toolbar_->addWidget(cameraCombo_);
+ toolbar_->addWidget(cameraSelectButton_);
toolbar_->addSeparator();
@@ -260,14 +258,18 @@ void MainWindow::updateTitle()
* Camera Selection
*/
-void MainWindow::switchCamera(int index)
+void MainWindow::switchCamera()
{
/* Get and acquire the new camera. */
- const auto &cameras = cm_->cameras();
- if (static_cast<unsigned int>(index) >= cameras.size())
+ std::string newCameraId = chooseCamera();
+
+ if (newCameraId.empty())
+ return;
+
+ if (camera_ && newCameraId == camera_->id())
return;
- const std::shared_ptr<Camera> &cam = cameras[index];
+ const std::shared_ptr<Camera> &cam = cm_->get(newCameraId);
if (cam->acquire()) {
qInfo() << "Failed to acquire camera" << cam->id().c_str();
@@ -282,32 +284,23 @@ void MainWindow::switchCamera(int index)
*/
startStopAction_->setChecked(false);
- camera_->release();
+ if (camera_)
+ camera_->release();
+
camera_ = cam;
startStopAction_->setChecked(true);
+
+	/* Display the current camera id in the toolbar. */
+ cameraSelectButton_->setText(QString::fromStdString(newCameraId));
}
std::string MainWindow::chooseCamera()
{
- QStringList cameras;
- bool result;
-
- /* If only one camera is available, use it automatically. */
- if (cm_->cameras().size() == 1)
- return cm_->cameras()[0]->id();
-
- /* Present a dialog box to pick a camera. */
- for (const std::shared_ptr<Camera> &cam : cm_->cameras())
- cameras.append(QString::fromStdString(cam->id()));
-
- QString id = QInputDialog::getItem(this, "Select Camera",
- "Camera:", cameras, 0,
- false, &result);
- if (!result)
+ if (cameraSelectorDialog_->exec() != QDialog::Accepted)
return std::string();
- return id.toStdString();
+ return cameraSelectorDialog_->getCameraId();
}
int MainWindow::openCamera()
@@ -339,8 +332,8 @@ int MainWindow::openCamera()
return -EBUSY;
}
- /* Set the combo-box entry with the currently selected Camera. */
- cameraCombo_->setCurrentText(QString::fromStdString(cameraName));
+ /* Set the camera switch button with the currently selected Camera id. */
+ cameraSelectButton_->setText(QString::fromStdString(cameraName));
return 0;
}
@@ -447,9 +440,13 @@ int MainWindow::startCapture()
else
rawStream_ = nullptr;
- /* Configure the viewfinder. */
+ /*
+ * Configure the viewfinder. If no color space is reported, default to
+ * sYCC.
+ */
ret = viewfinder_->setFormat(vfConfig.pixelFormat,
QSize(vfConfig.size.width, vfConfig.size.height),
+ vfConfig.colorSpace.value_or(ColorSpace::Sycc),
vfConfig.stride);
if (ret < 0) {
qInfo() << "Failed to set viewfinder format";
@@ -522,7 +519,7 @@ int MainWindow::startCapture()
/* Queue all requests. */
for (std::unique_ptr<Request> &request : requests_) {
- ret = camera_->queueRequest(request.get());
+ ret = queueRequest(request.get());
if (ret < 0) {
qWarning() << "Can't queue request";
goto error_disconnect;
@@ -602,21 +599,20 @@ void MainWindow::stopCapture()
void MainWindow::processHotplug(HotplugEvent *e)
{
Camera *camera = e->camera();
+ QString cameraId = QString::fromStdString(camera->id());
HotplugEvent::PlugEvent event = e->hotplugEvent();
if (event == HotplugEvent::HotPlug) {
- cameraCombo_->addItem(QString::fromStdString(camera->id()));
+ cameraSelectorDialog_->addCamera(cameraId);
} else if (event == HotplugEvent::HotUnplug) {
/* Check if the currently-streaming camera is removed. */
if (camera == camera_.get()) {
toggleCapture(false);
camera_->release();
camera_.reset();
- cameraCombo_->setCurrentIndex(0);
}
- int camIndex = cameraCombo_->findText(QString::fromStdString(camera->id()));
- cameraCombo_->removeItem(camIndex);
+ cameraSelectorDialog_->removeCamera(cameraId);
}
}
@@ -756,7 +752,7 @@ void MainWindow::processViewfinder(FrameBuffer *buffer)
viewfinder_->render(buffer, mappedBuffers_[buffer].get());
}
-void MainWindow::queueRequest(FrameBuffer *buffer)
+void MainWindow::renderComplete(FrameBuffer *buffer)
{
Request *request;
{
@@ -785,6 +781,10 @@ void MainWindow::queueRequest(FrameBuffer *buffer)
qWarning() << "No free buffer available for RAW capture";
}
}
+ queueRequest(request);
+}
- camera_->queueRequest(request);
+int MainWindow::queueRequest(Request *request)
+{
+ return camera_->queueRequest(request);
}
diff --git a/src/qcam/main_window.h b/src/qcam/main_window.h
index fc70920f..95b64124 100644
--- a/src/qcam/main_window.h
+++ b/src/qcam/main_window.h
@@ -23,6 +23,7 @@
#include <QMainWindow>
#include <QMutex>
#include <QObject>
+#include <QPushButton>
#include <QQueue>
#include <QTimer>
@@ -31,8 +32,8 @@
#include "viewfinder.h"
class QAction;
-class QComboBox;
+class CameraSelectorDialog;
class Image;
class HotplugEvent;
@@ -59,7 +60,7 @@ private Q_SLOTS:
void quit();
void updateTitle();
- void switchCamera(int index);
+ void switchCamera();
void toggleCapture(bool start);
void saveImageAs();
@@ -67,7 +68,7 @@ private Q_SLOTS:
void processRaw(libcamera::FrameBuffer *buffer,
const libcamera::ControlList &metadata);
- void queueRequest(libcamera::FrameBuffer *buffer);
+ void renderComplete(libcamera::FrameBuffer *buffer);
private:
int createToolbars();
@@ -81,6 +82,7 @@ private:
void addCamera(std::shared_ptr<libcamera::Camera> camera);
void removeCamera(std::shared_ptr<libcamera::Camera> camera);
+ int queueRequest(libcamera::Request *request);
void requestComplete(libcamera::Request *request);
void processCapture();
void processHotplug(HotplugEvent *e);
@@ -89,7 +91,7 @@ private:
/* UI elements */
QToolBar *toolbar_;
QAction *startStopAction_;
- QComboBox *cameraCombo_;
+ QPushButton *cameraSelectButton_;
QAction *saveRaw_;
ViewFinder *viewfinder_;
@@ -99,6 +101,8 @@ private:
QString title_;
QTimer titleTimer_;
+ CameraSelectorDialog *cameraSelectorDialog_;
+
/* Options */
const OptionsParser::Options &options_;
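
With the slot renamed, the Qt connection that used to feed completed render buffers into queueRequest() presumably now targets renderComplete(), leaving the new private queueRequest(Request *) as the single point through which requests reach the camera. A sketch of the updated wiring; the exact connect() call is not shown in this diff and is inferred from the renames above:

	/*
	 * Assumed rewiring in the MainWindow constructor; the signal name
	 * ViewFinderQt::renderComplete is an inference, not part of this
	 * patch.
	 */
	ViewFinderQt *viewfinder = new ViewFinderQt(this);
	connect(viewfinder, &ViewFinderQt::renderComplete,
		this, &MainWindow::renderComplete);
	viewfinder_ = viewfinder;
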
diff --git a/src/qcam/meson.build b/src/qcam/meson.build
index c46f4631..61861ea6 100644
--- a/src/qcam/meson.build
+++ b/src/qcam/meson.build
@@ -18,6 +18,7 @@ qcam_sources = files([
'../cam/image.cpp',
'../cam/options.cpp',
'../cam/stream_options.cpp',
+ 'cam_select_dialog.cpp',
'format_converter.cpp',
'main.cpp',
'main_window.cpp',
@@ -26,6 +27,7 @@ qcam_sources = files([
])
qcam_moc_headers = files([
+ 'cam_select_dialog.h',
'main_window.h',
'viewfinder_qt.h',
])
diff --git a/src/qcam/viewfinder.h b/src/qcam/viewfinder.h
index 260074ae..a57446e8 100644
--- a/src/qcam/viewfinder.h
+++ b/src/qcam/viewfinder.h
@@ -11,6 +11,7 @@
#include <QList>
#include <QSize>
+#include <libcamera/color_space.h>
#include <libcamera/formats.h>
#include <libcamera/framebuffer.h>
@@ -24,6 +25,7 @@ public:
virtual const QList<libcamera::PixelFormat> &nativeFormats() const = 0;
virtual int setFormat(const libcamera::PixelFormat &format, const QSize &size,
+ const libcamera::ColorSpace &colorSpace,
unsigned int stride) = 0;
virtual void render(libcamera::FrameBuffer *buffer, Image *image) = 0;
virtual void stop() = 0;
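
Since setFormat() is pure virtual, every ViewFinder implementation must accept the new parameter even when it has no use for it. A hypothetical software-only implementer could simply mark it unused, mirroring the [[maybe_unused]] approach ViewFinderQt takes further down:

	/*
	 * Hypothetical ViewFinder implementation that ignores the colour
	 * space; MyViewFinder and its members are illustrative only.
	 */
	int MyViewFinder::setFormat(const libcamera::PixelFormat &format,
				    const QSize &size,
				    [[maybe_unused]] const libcamera::ColorSpace &colorSpace,
				    unsigned int stride)
	{
		format_ = format;
		size_ = size;
		stride_ = stride;
		return 0;
	}
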
diff --git a/src/qcam/viewfinder_gl.cpp b/src/qcam/viewfinder_gl.cpp
index 3ae8b03a..38ddad58 100644
--- a/src/qcam/viewfinder_gl.cpp
+++ b/src/qcam/viewfinder_gl.cpp
@@ -7,9 +7,12 @@
#include "viewfinder_gl.h"
+#include <array>
+
#include <QByteArray>
#include <QFile>
#include <QImage>
+#include <QStringList>
#include <libcamera/formats.h>
@@ -56,7 +59,8 @@ static const QList<libcamera::PixelFormat> supportedFormats{
};
ViewFinderGL::ViewFinderGL(QWidget *parent)
- : QOpenGLWidget(parent), buffer_(nullptr), image_(nullptr),
+ : QOpenGLWidget(parent), buffer_(nullptr),
+ colorSpace_(libcamera::ColorSpace::Raw), image_(nullptr),
vertexBuffer_(QOpenGLBuffer::VertexBuffer)
{
}
@@ -71,10 +75,11 @@ const QList<libcamera::PixelFormat> &ViewFinderGL::nativeFormats() const
return supportedFormats;
}
-int ViewFinderGL::setFormat(const libcamera::PixelFormat &format,
- const QSize &size, unsigned int stride)
+int ViewFinderGL::setFormat(const libcamera::PixelFormat &format, const QSize &size,
+ const libcamera::ColorSpace &colorSpace,
+ unsigned int stride)
{
- if (format != format_) {
+ if (format != format_ || colorSpace != colorSpace_) {
/*
* If the fragment shader already exists, remove it and create a
* new one for the new format.
@@ -88,7 +93,10 @@ int ViewFinderGL::setFormat(const libcamera::PixelFormat &format,
if (!selectFormat(format))
return -1;
+ selectColorSpace(colorSpace);
+
format_ = format;
+ colorSpace_ = colorSpace;
}
size_ = size;
@@ -317,6 +325,74 @@ bool ViewFinderGL::selectFormat(const libcamera::PixelFormat &format)
return ret;
}
+void ViewFinderGL::selectColorSpace(const libcamera::ColorSpace &colorSpace)
+{
+ std::array<double, 9> yuv2rgb;
+
+ /* OpenGL stores matrices in column-major order. */
+ switch (colorSpace.ycbcrEncoding) {
+ case libcamera::ColorSpace::YcbcrEncoding::None:
+ default:
+ yuv2rgb = {
+ 1.0000, 0.0000, 0.0000,
+ 0.0000, 1.0000, 0.0000,
+ 0.0000, 0.0000, 1.0000,
+ };
+ break;
+
+ case libcamera::ColorSpace::YcbcrEncoding::Rec601:
+ yuv2rgb = {
+ 1.0000, 1.0000, 1.0000,
+ 0.0000, -0.3441, 1.7720,
+ 1.4020, -0.7141, 0.0000,
+ };
+ break;
+
+ case libcamera::ColorSpace::YcbcrEncoding::Rec709:
+ yuv2rgb = {
+ 1.0000, 1.0000, 1.0000,
+ 0.0000, -0.1873, 1.8856,
+ 1.5748, -0.4681, 0.0000,
+ };
+ break;
+
+ case libcamera::ColorSpace::YcbcrEncoding::Rec2020:
+ yuv2rgb = {
+ 1.0000, 1.0000, 1.0000,
+ 0.0000, -0.1646, 1.8814,
+ 1.4746, -0.5714, 0.0000,
+ };
+ break;
+ }
+
+ double offset;
+
+ switch (colorSpace.range) {
+ case libcamera::ColorSpace::Range::Full:
+ default:
+ offset = 0.0;
+ break;
+
+ case libcamera::ColorSpace::Range::Limited:
+ offset = 16.0;
+
+ for (unsigned int i = 0; i < 3; ++i)
+ yuv2rgb[i] *= 255.0 / 219.0;
+ for (unsigned int i = 4; i < 9; ++i)
+ yuv2rgb[i] *= 255.0 / 224.0;
+ break;
+ }
+
+ QStringList matrix;
+
+ for (double coeff : yuv2rgb)
+ matrix.append(QString::number(coeff, 'f'));
+
+ fragmentShaderDefines_.append("#define YUV2RGB_MATRIX " + matrix.join(", "));
+ fragmentShaderDefines_.append(QString("#define YUV2RGB_Y_OFFSET %1")
+ .arg(offset, 0, 'f', 1));
+}
+
bool ViewFinderGL::createVertexShader()
{
/* Create Vertex Shader */
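
The matrices above are the standard YCbCr-to-RGB conversion matrices, stored column-major to match GLSL's mat3 constructor. In conventional row-major notation, the conversion the injected YUV2RGB_MATRIX and YUV2RGB_Y_OFFSET defines describe is, taking Rec.601 as the example:

\begin{bmatrix} R \\ G \\ B \end{bmatrix} =
\begin{bmatrix}
1.0000 & 0.0000 & 1.4020 \\
1.0000 & -0.3441 & -0.7141 \\
1.0000 & 1.7720 & 0.0000
\end{bmatrix}
\begin{bmatrix} Y - Y_{\mathrm{off}} \\ C_b - 128 \\ C_r - 128 \end{bmatrix}

with Y_off = 0 for full range. For limited (video) range, luma spans [16, 235] and chroma [16, 240], so Y_off = 16, the Y column is scaled by 255/219 and the chroma columns by 255/224; that is exactly what the two loops above do (the second loop starts at index 4 rather than 3, which is harmless since element 3 is 0.0 in all four matrices). How the fragment shader consumes the two defines is not shown in this patch.
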
diff --git a/src/qcam/viewfinder_gl.h b/src/qcam/viewfinder_gl.h
index 0a9275ba..68c2912d 100644
--- a/src/qcam/viewfinder_gl.h
+++ b/src/qcam/viewfinder_gl.h
@@ -39,6 +39,7 @@ public:
const QList<libcamera::PixelFormat> &nativeFormats() const override;
int setFormat(const libcamera::PixelFormat &format, const QSize &size,
+ const libcamera::ColorSpace &colorSpace,
unsigned int stride) override;
void render(libcamera::FrameBuffer *buffer, Image *image) override;
void stop() override;
@@ -56,6 +57,7 @@ protected:
private:
bool selectFormat(const libcamera::PixelFormat &format);
+ void selectColorSpace(const libcamera::ColorSpace &colorSpace);
void configureTexture(QOpenGLTexture &texture);
bool createFragmentShader();
@@ -66,6 +68,7 @@ private:
/* Captured image size, format and buffer */
libcamera::FrameBuffer *buffer_;
libcamera::PixelFormat format_;
+ libcamera::ColorSpace colorSpace_;
QSize size_;
unsigned int stride_;
Image *image_;
diff --git a/src/qcam/viewfinder_qt.cpp b/src/qcam/viewfinder_qt.cpp
index 7a6a60c9..c20fd6bc 100644
--- a/src/qcam/viewfinder_qt.cpp
+++ b/src/qcam/viewfinder_qt.cpp
@@ -54,8 +54,9 @@ const QList<libcamera::PixelFormat> &ViewFinderQt::nativeFormats() const
return formats;
}
-int ViewFinderQt::setFormat(const libcamera::PixelFormat &format,
- const QSize &size, unsigned int stride)
+int ViewFinderQt::setFormat(const libcamera::PixelFormat &format, const QSize &size,
+ [[maybe_unused]] const libcamera::ColorSpace &colorSpace,
+ unsigned int stride)
{
image_ = QImage();
diff --git a/src/qcam/viewfinder_qt.h b/src/qcam/viewfinder_qt.h
index 8c621452..eb3a9988 100644
--- a/src/qcam/viewfinder_qt.h
+++ b/src/qcam/viewfinder_qt.h
@@ -32,6 +32,7 @@ public:
const QList<libcamera::PixelFormat> &nativeFormats() const override;
int setFormat(const libcamera::PixelFormat &format, const QSize &size,
+ const libcamera::ColorSpace &colorSpace,
unsigned int stride) override;
void render(libcamera::FrameBuffer *buffer, Image *image) override;
void stop() override;
