yoctor-layers/meta-st/meta-st-openstlinux/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0024-v4l2codecs-Add-v4l2-encoder-class.patch

From 9697eee045811ee5153002529f4e4bf8afa7193a Mon Sep 17 00:00:00 2001
From: Benjamin Gaignard <benjamin.gaignard@collabora.com>
Date: Mon, 16 Jan 2023 17:36:14 +0100
Subject: [PATCH 3/5] v4l2codecs: Add v4l2 encoder class

Add v4l2 encoder helpers to be used later by v4l2 encoders.
---
sys/v4l2codecs/gstv4l2codecallocator.c | 105 +-
sys/v4l2codecs/gstv4l2codecallocator.h | 7 +-
sys/v4l2codecs/gstv4l2encoder.c | 1216 ++++++++++++++++++++++++
sys/v4l2codecs/gstv4l2encoder.h | 139 +++
sys/v4l2codecs/meson.build | 1 +
5 files changed, 1463 insertions(+), 5 deletions(-)
create mode 100644 sys/v4l2codecs/gstv4l2encoder.c
create mode 100644 sys/v4l2codecs/gstv4l2encoder.h
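Before the diff itself, a minimal usage sketch of the request lifecycle these helpers expose. The encode_one_frame() wrapper, its buffers and error handling are hypothetical placeholders (assuming gstv4l2encoder.h is included); only the gst_v4l2_encoder_* calls come from this patch.

/* Hypothetical caller sketch: pic_buf holds the raw picture, bitstream is the
 * buffer the driver writes the encoded data into. */
static GstFlowReturn
encode_one_frame (GstV4l2Encoder * encoder, guint32 frame_num,
    GstBuffer * pic_buf, GstBuffer * bitstream)
{
  GstV4l2Request *request;
  guint32 bytesused = 0;

  /* Wrap a media request around the picture and bitstream buffers */
  request = gst_v4l2_encoder_alloc_request (encoder, frame_num, pic_buf,
      bitstream);
  if (!request)
    return GST_FLOW_ERROR;

  /* Per-frame codec controls would be attached here with
   * gst_v4l2_encoder_set_controls (encoder, request, controls, count) */

  /* Queue both buffers and submit the request to the driver */
  if (!gst_v4l2_encoder_request_queue (request, 0)) {
    gst_v4l2_encoder_request_unref (request);
    return GST_FLOW_ERROR;
  }

  /* Wait for the request to complete and retrieve the bitstream size */
  if (gst_v4l2_encoder_request_set_done (request, &bytesused) <= 0) {
    gst_v4l2_encoder_request_unref (request);
    return GST_FLOW_ERROR;
  }

  gst_buffer_set_size (bitstream, bytesused);
  gst_v4l2_encoder_request_unref (request);

  return GST_FLOW_OK;
}
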
diff --git a/sys/v4l2codecs/gstv4l2codecallocator.c b/sys/v4l2codecs/gstv4l2codecallocator.c
index 40238bd..e0fb371 100644
--- a/sys/v4l2codecs/gstv4l2codecallocator.c
+++ b/sys/v4l2codecs/gstv4l2codecallocator.c
@@ -49,6 +49,7 @@ struct _GstV4l2CodecAllocator
gboolean flushing;
GstV4l2Decoder *decoder;
+ GstV4l2Encoder *encoder;
GstPadDirection direction;
};
@@ -110,6 +111,44 @@ gst_v4l2_codec_buffer_new (GstAllocator * allocator, GstV4l2Decoder * decoder,
return buf;
}
+static GstV4l2CodecBuffer *
+gst_v4l2_codec_encoder_buffer_new (GstAllocator * allocator,
+ GstV4l2Encoder * encoder, GstPadDirection direction, gint index)
+{
+ GstV4l2CodecBuffer *buf;
+ guint i, num_mems;
+ gint fds[GST_VIDEO_MAX_PLANES];
+ gsize sizes[GST_VIDEO_MAX_PLANES];
+ gsize offsets[GST_VIDEO_MAX_PLANES];
+
+ if (!gst_v4l2_encoder_export_buffer (encoder, direction, index, fds, sizes,
+ offsets, &num_mems))
+ return NULL;
+
+ buf = g_new0 (GstV4l2CodecBuffer, 1);
+ buf->index = index;
+ buf->num_mems = num_mems;
+ for (i = 0; i < buf->num_mems; i++) {
+ GstMemory *mem = gst_fd_allocator_alloc (allocator, fds[i], sizes[i],
+ GST_FD_MEMORY_FLAG_KEEP_MAPPED);
+ gst_memory_resize (mem, offsets[i], sizes[i] - offsets[i]);
+
+ GST_MINI_OBJECT (mem)->dispose = gst_v4l2_codec_allocator_release;
+ gst_mini_object_set_qdata (GST_MINI_OBJECT (mem),
+ gst_v4l2_codec_buffer_quark (), buf, NULL);
+
+ /* The memory holds a reference on the allocator; drop it here to break
+ * the reference cycle. */
+ gst_object_unref (mem->allocator);
+ buf->mem[i] = mem;
+ }
+
+ GST_DEBUG_OBJECT (allocator, "Create buffer %i with %i memory fds",
+ buf->index, buf->num_mems);
+
+ return buf;
+}
+
static void
gst_v4l2_codec_buffer_free (GstV4l2CodecBuffer * buf)
{
@@ -169,7 +208,7 @@ gst_v4l2_codec_allocator_release (GstMiniObject * mini_object)
}
static gboolean
-gst_v4l2_codec_allocator_prepare (GstV4l2CodecAllocator * self)
+gst_v4l2_codec_decoder_allocator_prepare (GstV4l2CodecAllocator * self)
{
GstV4l2Decoder *decoder = self->decoder;
GstPadDirection direction = self->direction;
@@ -198,6 +237,37 @@ failed:
return FALSE;
}
+static gboolean
+gst_v4l2_codec_encoder_allocator_prepare (GstV4l2CodecAllocator * self)
+{
+ GstV4l2Encoder *encoder = self->encoder;
+ GstPadDirection direction = self->direction;
+ gint ret;
+ guint i;
+
+ ret = gst_v4l2_encoder_request_buffers (encoder, direction, self->pool_size);
+ if (ret < self->pool_size) {
+ if (ret >= 0)
+ GST_ERROR_OBJECT (self,
+ "%i buffers were needed, but only %i could be allocated",
+ self->pool_size, ret);
+ goto failed;
+ }
+
+ for (i = 0; i < self->pool_size; i++) {
+ GstV4l2CodecBuffer *buf =
+ gst_v4l2_codec_encoder_buffer_new (GST_ALLOCATOR (self),
+ encoder, direction, i);
+ g_queue_push_tail (&self->pool, buf);
+ }
+
+ return TRUE;
+
+failed:
+ gst_v4l2_encoder_request_buffers (encoder, direction, 0);
+ return FALSE;
+}
+
static void
gst_v4l2_codec_allocator_init (GstV4l2CodecAllocator * self)
{
@@ -218,6 +288,11 @@ gst_v4l2_codec_allocator_dispose (GObject * object)
gst_clear_object (&self->decoder);
}
+ if (self->encoder) {
+ gst_v4l2_codec_allocator_detach (self);
+ gst_clear_object (&self->encoder);
+ }
+
G_OBJECT_CLASS (gst_v4l2_codec_allocator_parent_class)->dispose (object);
}
@@ -253,7 +328,26 @@ gst_v4l2_codec_allocator_new (GstV4l2Decoder * decoder,
self->direction = direction;
self->pool_size = num_buffers;
- if (!gst_v4l2_codec_allocator_prepare (self)) {
+ if (!gst_v4l2_codec_decoder_allocator_prepare (self)) {
+ g_object_unref (self);
+ return NULL;
+ }
+
+ return self;
+}
+
+GstV4l2CodecAllocator *
+gst_v4l2_codec_encoder_allocator_new (GstV4l2Encoder * encoder,
+ GstPadDirection direction, guint num_buffers)
+{
+ GstV4l2CodecAllocator *self =
+ g_object_new (GST_TYPE_V4L2_CODEC_ALLOCATOR, NULL);
+
+ self->encoder = g_object_ref (encoder);
+ self->direction = direction;
+ self->pool_size = num_buffers;
+
+ if (!gst_v4l2_codec_encoder_allocator_prepare (self)) {
g_object_unref (self);
return NULL;
}
@@ -348,7 +442,12 @@ gst_v4l2_codec_allocator_detach (GstV4l2CodecAllocator * self)
GST_OBJECT_LOCK (self);
if (!self->detached) {
self->detached = TRUE;
- gst_v4l2_decoder_request_buffers (self->decoder, self->direction, 0);
+ if (self->decoder) {
+ gst_v4l2_decoder_request_buffers (self->decoder, self->direction, 0);
+ }
+ if (self->encoder) {
+ gst_v4l2_encoder_request_buffers (self->encoder, self->direction, 0);
+ }
}
GST_OBJECT_UNLOCK (self);
}
diff --git a/sys/v4l2codecs/gstv4l2codecallocator.h b/sys/v4l2codecs/gstv4l2codecallocator.h
index 97fe1d2..8a099f0 100644
--- a/sys/v4l2codecs/gstv4l2codecallocator.h
+++ b/sys/v4l2codecs/gstv4l2codecallocator.h
@@ -26,6 +26,7 @@
#include "gstv4l2codecdevice.h"
#include "gstv4l2decoder.h"
+#include "gstv4l2encoder.h"
#define GST_TYPE_V4L2_CODEC_ALLOCATOR gst_v4l2_codec_allocator_get_type ()
G_DECLARE_FINAL_TYPE (GstV4l2CodecAllocator, gst_v4l2_codec_allocator,
@@ -36,9 +37,11 @@ GstV4l2CodecAllocator *gst_v4l2_codec_allocator_new (GstV4l2Decoder * decoder,
GstPadDirection direction,
guint num_buffers);
-GstMemory *gst_v4l2_codec_allocator_alloc (GstV4l2CodecAllocator * allocator);
-
+GstV4l2CodecAllocator *gst_v4l2_codec_encoder_allocator_new (GstV4l2Encoder * encoder,
+ GstPadDirection direction,
+ guint num_buffers);
+GstMemory *gst_v4l2_codec_allocator_alloc (GstV4l2CodecAllocator * allocator);
gboolean gst_v4l2_codec_allocator_create_buffer (GstV4l2CodecAllocator * self);
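For context, a rough sketch of how an encoder element could create its two allocators with the new entry point declared above; the wrapper function and pool sizes are hypothetical, only gst_v4l2_codec_encoder_allocator_new() comes from this patch.

/* Hypothetical setup sketch, assuming gstv4l2codecallocator.h is included.
 * The OUTPUT (sink) queue carries the raw pictures to encode, the CAPTURE
 * (src) queue the encoded bitstream; the pool sizes are arbitrary. */
static gboolean
setup_encoder_allocators (GstV4l2Encoder * encoder,
    GstV4l2CodecAllocator ** sink_allocator,
    GstV4l2CodecAllocator ** src_allocator)
{
  *sink_allocator =
      gst_v4l2_codec_encoder_allocator_new (encoder, GST_PAD_SINK, 4);
  *src_allocator =
      gst_v4l2_codec_encoder_allocator_new (encoder, GST_PAD_SRC, 4);

  /* NULL means VIDIOC_REQBUFS could not provide the requested count */
  return *sink_allocator != NULL && *src_allocator != NULL;
}
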
diff --git a/sys/v4l2codecs/gstv4l2encoder.c b/sys/v4l2codecs/gstv4l2encoder.c
new file mode 100644
index 0000000..5ae2cfa
--- /dev/null
+++ b/sys/v4l2codecs/gstv4l2encoder.c
@@ -0,0 +1,1216 @@
+/* GStreamer
+ * Copyright (C) 2022 Benjamin Gaignard <benjamin.gaignard@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#include "gstv4l2codecallocator.h"
+#include "gstv4l2codecpool.h"
+#include "gstv4l2encoder.h"
+#include "gstv4l2format.h"
+#include "linux/media.h"
+#include "linux/videodev2.h"
+
+#include <fcntl.h>
+#include <sys/ioctl.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include <gst/base/base.h>
+
+GST_DEBUG_CATEGORY (v4l2_encoder_debug);
+#define GST_CAT_DEFAULT v4l2_encoder_debug
+
+enum
+{
+ PROP_0,
+ PROP_MEDIA_DEVICE,
+ PROP_VIDEO_DEVICE,
+};
+
+struct _GstV4l2Request
+{
+ /* non-thread safe */
+ gint ref_count;
+
+ GstV4l2Encoder *encoder;
+ gint fd;
+ guint32 frame_num;
+ GstBuffer *bitstream;
+ GstBuffer *pic_buf;
+ GstPoll *poll;
+ GstPollFD pollfd;
+
+ /* request state */
+ gboolean pending;
+ gboolean failed;
+ gboolean hold_pic_buf;
+ gboolean sub_request;
+};
+
+struct _GstV4l2Encoder
+{
+ GstObject parent;
+
+ gboolean opened;
+ gint media_fd;
+ gint video_fd;
+ GstQueueArray *request_pool;
+ GstQueueArray *pending_requests;
+ guint version;
+
+ enum v4l2_buf_type src_buf_type;
+ enum v4l2_buf_type sink_buf_type;
+ gboolean mplane;
+
+ /* properties */
+ gchar *media_device;
+ gchar *video_device;
+ guint render_delay;
+
+ /* detected features */
+ gboolean supports_holding_capture;
+};
+
+G_DEFINE_TYPE_WITH_CODE (GstV4l2Encoder, gst_v4l2_encoder, GST_TYPE_OBJECT,
+ GST_DEBUG_CATEGORY_INIT (v4l2_encoder_debug, "v4l2codecs-encoder", 0,
+ "V4L2 stateless encoder helper"));
+
+static void gst_v4l2_encoder_request_free (GstV4l2Request * request);
+
+static guint32
+direction_to_buffer_type (GstV4l2Encoder * self, GstPadDirection direction)
+{
+ if (direction == GST_PAD_SRC)
+ return self->src_buf_type;
+ else
+ return self->sink_buf_type;
+}
+
+static void
+gst_v4l2_encoder_finalize (GObject * obj)
+{
+ GstV4l2Encoder *self = GST_V4L2_ENCODER (obj);
+
+ gst_v4l2_encoder_close (self);
+
+ g_free (self->media_device);
+ g_free (self->video_device);
+ gst_queue_array_free (self->request_pool);
+ gst_queue_array_free (self->pending_requests);
+
+ G_OBJECT_CLASS (gst_v4l2_encoder_parent_class)->finalize (obj);
+}
+
+static void
+gst_v4l2_encoder_init (GstV4l2Encoder * self)
+{
+ self->request_pool = gst_queue_array_new (16);
+ self->pending_requests = gst_queue_array_new (16);
+}
+
+static void
+gst_v4l2_encoder_class_init (GstV4l2EncoderClass * klass)
+{
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+
+ gobject_class->finalize = gst_v4l2_encoder_finalize;
+ gobject_class->get_property = gst_v4l2_encoder_get_property;
+ gobject_class->set_property = gst_v4l2_encoder_set_property;
+
+ gst_v4l2_encoder_install_properties (gobject_class, 0, NULL);
+}
+
+GstV4l2Encoder *
+gst_v4l2_encoder_new (GstV4l2CodecDevice * device)
+{
+ GstV4l2Encoder *encoder;
+
+ g_return_val_if_fail (device->function == MEDIA_ENT_F_PROC_VIDEO_ENCODER,
+ NULL);
+
+ encoder = g_object_new (GST_TYPE_V4L2_ENCODER,
+ "media-device", device->media_device_path,
+ "video-device", device->video_device_path, NULL);
+
+ return gst_object_ref_sink (encoder);
+}
+
+guint
+gst_v4l2_encoder_get_version (GstV4l2Encoder * self)
+{
+ return self->version;
+}
+
+gboolean
+gst_v4l2_encoder_open (GstV4l2Encoder * self)
+{
+ gint ret;
+ struct v4l2_capability querycap;
+ guint32 capabilities;
+
+ self->media_fd = open (self->media_device, 0);
+ if (self->media_fd < 0) {
+ GST_ERROR_OBJECT (self, "Failed to open '%s': %s",
+ self->media_device, g_strerror (errno));
+ return FALSE;
+ }
+
+ self->video_fd = open (self->video_device, O_NONBLOCK);
+ if (self->video_fd < 0) {
+ GST_ERROR_OBJECT (self, "Failed to open '%s': %s",
+ self->video_device, g_strerror (errno));
+ return FALSE;
+ }
+
+ ret = ioctl (self->video_fd, VIDIOC_QUERYCAP, &querycap);
+ if (ret < 0) {
+ GST_ERROR_OBJECT (self, "VIDIOC_QUERYCAP failed: %s", g_strerror (errno));
+ gst_v4l2_encoder_close (self);
+ return FALSE;
+ }
+
+ self->version = querycap.version;
+
+ if (querycap.capabilities & V4L2_CAP_DEVICE_CAPS)
+ capabilities = querycap.device_caps;
+ else
+ capabilities = querycap.capabilities;
+
+ if (capabilities & V4L2_CAP_VIDEO_M2M_MPLANE) {
+ self->sink_buf_type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
+ self->src_buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ self->mplane = TRUE;
+ } else if (capabilities & V4L2_CAP_VIDEO_M2M) {
+ self->sink_buf_type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
+ self->src_buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ self->mplane = FALSE;
+ } else {
+ GST_ERROR_OBJECT (self, "Unsupported memory-2-memory device.");
+ gst_v4l2_encoder_close (self);
+ return FALSE;
+ }
+
+ self->opened = TRUE;
+
+ return TRUE;
+}
+
+gboolean
+gst_v4l2_encoder_close (GstV4l2Encoder * self)
+{
+ GstV4l2Request *request;
+
+ while ((request = gst_queue_array_pop_head (self->pending_requests)))
+ gst_v4l2_encoder_request_unref (request);
+
+ while ((request = gst_queue_array_pop_head (self->request_pool)))
+ gst_v4l2_encoder_request_free (request);
+
+ if (self->media_fd)
+ close (self->media_fd);
+ if (self->video_fd)
+ close (self->video_fd);
+
+ self->media_fd = 0;
+ self->video_fd = 0;
+ self->opened = FALSE;
+
+ return TRUE;
+}
+
+gboolean
+gst_v4l2_encoder_streamon (GstV4l2Encoder * self, GstPadDirection direction)
+{
+ gint ret;
+ guint32 type = direction_to_buffer_type (self, direction);
+
+ ret = ioctl (self->video_fd, VIDIOC_STREAMON, &type);
+ if (ret < 0) {
+ GST_ERROR_OBJECT (self, "VIDIOC_STREAMON failed: %s", g_strerror (errno));
+ return FALSE;
+ }
+
+ return TRUE;
+}
+
+gboolean
+gst_v4l2_encoder_streamoff (GstV4l2Encoder * self, GstPadDirection direction)
+{
+ guint32 type = direction_to_buffer_type (self, direction);
+ gint ret;
+
+ if (direction == GST_PAD_SRC) {
+ GstV4l2Request *pending_req;
+
+ /* STREAMOFF has the effect of cancelling all requests and unqueuing all
+ * buffers, so clear the pending request list */
+ while ((pending_req = gst_queue_array_pop_head (self->pending_requests))) {
+ pending_req->pending = FALSE;
+ gst_v4l2_encoder_request_unref (pending_req);
+ }
+ }
+
+ ret = ioctl (self->video_fd, VIDIOC_STREAMOFF, &type);
+ if (ret < 0) {
+ GST_ERROR_OBJECT (self, "VIDIOC_STREAMOFF failed: %s", g_strerror (errno));
+ return FALSE;
+ }
+
+ return TRUE;
+}
+
+gboolean
+gst_v4l2_encoder_flush (GstV4l2Encoder * self)
+{
+ /* We ignore streamoff failure as it's not relevant, if we manage to
+ * streamon again, we are good. */
+ gst_v4l2_encoder_streamoff (self, GST_PAD_SINK);
+ gst_v4l2_encoder_streamoff (self, GST_PAD_SRC);
+
+ return gst_v4l2_encoder_streamon (self, GST_PAD_SINK) &&
+ gst_v4l2_encoder_streamon (self, GST_PAD_SRC);
+}
+
+GstCaps *
+gst_v4l2_encoder_list_sink_formats (GstV4l2Encoder * self)
+{
+ gint ret;
+ struct v4l2_format fmt = {
+ .type = self->sink_buf_type,
+ };
+ GstVideoFormat format;
+ GstCaps *caps;
+ GValue list = G_VALUE_INIT;
+ GValue value = G_VALUE_INIT;
+ gint i;
+
+ if (!self->opened)
+ return NULL;
+
+ ret = ioctl (self->video_fd, VIDIOC_G_FMT, &fmt);
+ if (ret < 0) {
+ GST_ERROR_OBJECT (self, "VIDIOC_G_FMT failed: %s", g_strerror (errno));
+ return NULL;
+ }
+
+ /* We first place a structure with the default pixel format */
+ if (gst_v4l2_format_to_video_format (fmt.fmt.pix_mp.pixelformat, &format))
+ caps = gst_caps_new_simple ("video/x-raw", "format", G_TYPE_STRING,
+ gst_video_format_to_string (format), NULL);
+ else
+ caps = gst_caps_new_empty ();
+
+ /* And then enumerate other possible formats and place that as a second
+ * structure in the caps */
+ g_value_init (&list, GST_TYPE_LIST);
+ g_value_init (&value, G_TYPE_STRING);
+
+ for (i = 0; ret >= 0; i++) {
+ struct v4l2_fmtdesc fmtdesc = { i, self->sink_buf_type, };
+
+ ret = ioctl (self->video_fd, VIDIOC_ENUM_FMT, &fmtdesc);
+ if (ret < 0) {
+ if (errno != EINVAL)
+ GST_ERROR_OBJECT (self, "VIDIOC_ENUM_FMT failed: %s",
+ g_strerror (errno));
+ continue;
+ }
+
+ if (gst_v4l2_format_to_video_format (fmtdesc.pixelformat, &format)) {
+ g_value_set_static_string (&value, gst_video_format_to_string (format));
+ gst_value_list_append_value (&list, &value);
+ }
+ }
+ g_value_reset (&value);
+
+ if (gst_value_list_get_size (&list) > 0) {
+ GstStructure *str = gst_structure_new_empty ("video/x-raw");
+ gst_structure_take_value (str, "format", &list);
+ gst_caps_append_structure (caps, str);
+ } else {
+ g_value_reset (&list);
+ }
+
+ return caps;
+}
+
+gboolean
+gst_v4l2_encoder_enum_sink_fmt (GstV4l2Encoder * self, gint i,
+ guint32 * out_fmt)
+{
+ struct v4l2_fmtdesc fmtdesc = { i, self->sink_buf_type, };
+ gint ret;
+
+ g_return_val_if_fail (self->opened, FALSE);
+
+ ret = ioctl (self->video_fd, VIDIOC_ENUM_FMT, &fmtdesc);
+ if (ret < 0) {
+ if (errno != EINVAL)
+ GST_ERROR_OBJECT (self, "VIDIOC_ENUM_FMT failed: %s", g_strerror (errno));
+ return FALSE;
+ }
+
+ GST_DEBUG_OBJECT (self, "Found format %" GST_FOURCC_FORMAT " (%s)",
+ GST_FOURCC_ARGS (fmtdesc.pixelformat), fmtdesc.description);
+ *out_fmt = fmtdesc.pixelformat;
+
+ return TRUE;
+}
+
+gboolean
+gst_v4l2_encoder_select_sink_format (GstV4l2Encoder * self, GstVideoInfo * in,
+ GstVideoInfo * out)
+{
+ gint ret;
+ struct v4l2_format fmt = {
+ .type = self->sink_buf_type,
+ };
+ guint32 pix_fmt;
+ gint width = in->width;
+ gint height = in->height;
+
+ ret = ioctl (self->video_fd, VIDIOC_G_FMT, &fmt);
+ if (ret < 0) {
+ GST_ERROR_OBJECT (self, "VIDIOC_G_FMT failed: %s", g_strerror (errno));
+ return FALSE;
+ }
+
+ if (!gst_v4l2_format_from_video_format (in->finfo->format, &pix_fmt)) {
+ GST_ERROR_OBJECT (self, "Unsupported pixelformat %s",
+ gst_video_format_to_string (in->finfo->format));
+ return FALSE;
+ }
+
+ if (pix_fmt != fmt.fmt.pix_mp.pixelformat
+ || fmt.fmt.pix_mp.width != width || fmt.fmt.pix_mp.height != height) {
+ GST_DEBUG_OBJECT (self,
+ "Trying to use peer format: %" GST_FOURCC_FORMAT " %ix%i",
+ GST_FOURCC_ARGS (pix_fmt), width, height);
+ fmt.fmt.pix_mp.pixelformat = pix_fmt;
+ fmt.fmt.pix_mp.width = width;
+ fmt.fmt.pix_mp.height = height;
+ ret = ioctl (self->video_fd, VIDIOC_S_FMT, &fmt);
+ if (ret < 0) {
+ GST_ERROR_OBJECT (self, "VIDIOC_S_FMT failed: %s", g_strerror (errno));
+ return FALSE;
+ }
+ }
+
+ if (!gst_v4l2_format_to_video_info (&fmt, out)) {
+ GST_ERROR_OBJECT (self, "Unsupported V4L2 pixelformat %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (fmt.fmt.pix_mp.pixelformat));
+ return FALSE;
+ }
+
+ GST_INFO_OBJECT (self, "Selected sink format %s %ix%i",
+ gst_video_format_to_string (out->finfo->format), out->width, out->height);
+
+ return TRUE;
+}
+
+gboolean
+gst_v4l2_encoder_enum_src_formats (GstV4l2Encoder * self, gint i,
+ guint32 * out_fmt)
+{
+ struct v4l2_fmtdesc fmtdesc = { i, self->src_buf_type, };
+ gint ret;
+
+ ret = ioctl (self->video_fd, VIDIOC_ENUM_FMT, &fmtdesc);
+ if (ret < 0) {
+ if (errno != EINVAL)
+ GST_ERROR_OBJECT (self, "VIDIOC_ENUM_FMT failed: %s", g_strerror (errno));
+ return FALSE;
+ }
+
+ GST_DEBUG_OBJECT (self, "Found format %" GST_FOURCC_FORMAT " (%s)",
+ GST_FOURCC_ARGS (fmtdesc.pixelformat), fmtdesc.description);
+ *out_fmt = fmtdesc.pixelformat;
+
+ return TRUE;
+}
+
+gboolean
+gst_v4l2_encoder_set_src_fmt (GstV4l2Encoder * self, GstVideoInfo * info,
+ guint32 pix_fmt)
+{
+ struct v4l2_format fmt = (struct v4l2_format) {
+ .type = self->src_buf_type,
+ };
+ gint ret;
+ gint width = info->width;
+ gint height = info->height;
+
+ ret = ioctl (self->video_fd, VIDIOC_G_FMT, &fmt);
+ if (ret < 0) {
+ GST_ERROR_OBJECT (self, "VIDIOC_G_FMT failed: %s", g_strerror (errno));
+ return FALSE;
+ }
+
+ if (pix_fmt != fmt.fmt.pix_mp.pixelformat
+ || fmt.fmt.pix_mp.width != width || fmt.fmt.pix_mp.height != height) {
+ GST_DEBUG_OBJECT (self,
+ "Trying to use peer format: %" GST_FOURCC_FORMAT " %ix%i",
+ GST_FOURCC_ARGS (pix_fmt), width, height);
+ fmt.fmt.pix_mp.pixelformat = pix_fmt;
+ fmt.fmt.pix_mp.width = width;
+ fmt.fmt.pix_mp.height = height;
+ fmt.fmt.pix_mp.num_planes = 1;
+ fmt.fmt.pix_mp.plane_fmt[0].sizeimage =
+ GST_ROUND_UP_16 (width) * GST_ROUND_UP_16 (height) * 3 / 2;
+ ret = ioctl (self->video_fd, VIDIOC_S_FMT, &fmt);
+ if (ret < 0) {
+ GST_ERROR_OBJECT (self, "VIDIOC_S_FMT failed: %s", g_strerror (errno));
+ return FALSE;
+ }
+ }
+
+ if (fmt.fmt.pix_mp.pixelformat != pix_fmt) {
+ GST_WARNING_OBJECT (self, "Failed to set src format to %"
+ GST_FOURCC_FORMAT, GST_FOURCC_ARGS (pix_fmt));
+ errno = EINVAL;
+ return FALSE;
+ }
+
+ return TRUE;
+}
+
+gint
+gst_v4l2_encoder_request_buffers (GstV4l2Encoder * self,
+ GstPadDirection direction, guint num_buffers)
+{
+ gint ret;
+ struct v4l2_requestbuffers reqbufs = {
+ .count = num_buffers,
+ .memory = V4L2_MEMORY_MMAP,
+ .type = direction_to_buffer_type (self, direction),
+ };
+
+ GST_DEBUG_OBJECT (self, "Requesting %u buffers", num_buffers);
+
+ ret = ioctl (self->video_fd, VIDIOC_REQBUFS, &reqbufs);
+ if (ret < 0) {
+ GST_ERROR_OBJECT (self, "VIDIOC_REQBUFS failed: %s", g_strerror (errno));
+ return ret;
+ }
+
+ if (direction == GST_PAD_SINK) {
+ if (reqbufs.capabilities & V4L2_BUF_CAP_SUPPORTS_M2M_HOLD_CAPTURE_BUF)
+ self->supports_holding_capture = TRUE;
+ else
+ self->supports_holding_capture = FALSE;
+ }
+
+ return reqbufs.count;
+}
+
+gboolean
+gst_v4l2_encoder_export_buffer (GstV4l2Encoder * self,
+ GstPadDirection direction, gint index, gint * fds, gsize * sizes,
+ gsize * offsets, guint * num_fds)
+{
+ gint i, ret;
+ struct v4l2_plane planes[GST_VIDEO_MAX_PLANES] = { {0} };
+ struct v4l2_buffer v4l2_buf = {
+ .index = 0,
+ .type = direction_to_buffer_type (self, direction),
+ };
+
+ if (self->mplane) {
+ v4l2_buf.length = GST_VIDEO_MAX_PLANES;
+ v4l2_buf.m.planes = planes;
+ }
+
+ ret = ioctl (self->video_fd, VIDIOC_QUERYBUF, &v4l2_buf);
+ if (ret < 0) {
+ GST_ERROR_OBJECT (self, "VIDIOC_QUERYBUF failed: %s", g_strerror (errno));
+ return FALSE;
+ }
+
+ if (self->mplane) {
+ for (i = 0; i < v4l2_buf.length; i++) {
+ struct v4l2_plane *plane = v4l2_buf.m.planes + i;
+ struct v4l2_exportbuffer expbuf = {
+ .type = direction_to_buffer_type (self, direction),
+ .index = index,
+ .plane = i,
+ .flags = O_CLOEXEC | O_RDWR,
+ };
+
+ ret = ioctl (self->video_fd, VIDIOC_EXPBUF, &expbuf);
+ if (ret < 0) {
+ gint j;
+ GST_ERROR_OBJECT (self, "VIDIOC_EXPBUF failed: %s", g_strerror (errno));
+
+ for (j = i - 1; j >= 0; j--)
+ close (fds[j]);
+
+ return FALSE;
+ }
+
+ *num_fds = v4l2_buf.length;
+ fds[i] = expbuf.fd;
+ sizes[i] = plane->length;
+ offsets[i] = plane->data_offset;
+ }
+ } else {
+ struct v4l2_exportbuffer expbuf = {
+ .type = direction_to_buffer_type (self, direction),
+ .index = index,
+ .flags = O_CLOEXEC | O_RDWR,
+ };
+
+ ret = ioctl (self->video_fd, VIDIOC_EXPBUF, &expbuf);
+ if (ret < 0) {
+ GST_ERROR_OBJECT (self, "VIDIOC_EXPBUF failed: %s", g_strerror (errno));
+ return FALSE;
+ }
+
+ *num_fds = 1;
+ fds[0] = expbuf.fd;
+ sizes[0] = v4l2_buf.length;
+ offsets[0] = 0;
+ }
+
+ return TRUE;
+}
+
+static gboolean
+gst_v4l2_encoder_queue_src_buffer (GstV4l2Encoder * self,
+ GstV4l2Request * request, GstBuffer * buffer)
+{
+ gint ret;
+ gsize bytesused = gst_buffer_get_size (buffer);
+ struct v4l2_plane plane = {
+ .bytesused = bytesused,
+ };
+ struct v4l2_buffer buf = {
+ .type = self->src_buf_type,
+ .memory = V4L2_MEMORY_MMAP,
+ .index = gst_v4l2_codec_buffer_get_index (buffer),
+ };
+
+ GST_TRACE_OBJECT (self, "Queueing bitstream buffer %i", buf.index);
+
+ if (self->mplane) {
+ buf.length = 1;
+ buf.m.planes = &plane;
+ } else {
+ buf.bytesused = bytesused;
+ }
+
+ ret = ioctl (self->video_fd, VIDIOC_QBUF, &buf);
+ if (ret < 0) {
+ GST_ERROR_OBJECT (self, "VIDIOC_QBUF failed: %s", g_strerror (errno));
+ return FALSE;
+ }
+
+ return TRUE;
+}
+
+static gboolean
+gst_v4l2_encoder_queue_sink_buffer (GstV4l2Encoder * self,
+ GstV4l2Request * request, GstBuffer * buffer, guint32 frame_num,
+ guint flags)
+{
+ gint i, ret;
+ struct v4l2_plane planes[GST_VIDEO_MAX_PLANES];
+ struct v4l2_buffer buf = {
+ .type = self->sink_buf_type,
+ .memory = V4L2_MEMORY_MMAP,
+ .index = gst_v4l2_codec_buffer_get_index (buffer),
+ .timestamp.tv_usec = frame_num,
+ .request_fd = request->fd,
+ .flags = V4L2_BUF_FLAG_REQUEST_FD | flags,
+ };
+
+ GST_TRACE_OBJECT (self, "Queueing picture buffer %i", buf.index);
+
+ if (self->mplane) {
+ buf.length = gst_buffer_n_memory (buffer);
+ buf.m.planes = planes;
+ for (i = 0; i < buf.length; i++) {
+ GstMemory *mem = gst_buffer_peek_memory (buffer, i);
+ /* *INDENT-OFF* */
+ planes[i] = (struct v4l2_plane) {
+ .bytesused = gst_memory_get_sizes (mem, NULL, NULL),
+ };
+ /* *INDENT-ON* */
+ }
+ } else {
+ buf.bytesused = gst_buffer_get_size (buffer);
+ }
+
+ ret = ioctl (self->video_fd, VIDIOC_QBUF, &buf);
+ if (ret < 0) {
+ GST_ERROR_OBJECT (self, "VIDIOC_QBUF failed: %s", g_strerror (errno));
+ return FALSE;
+ }
+
+ return TRUE;
+}
+
+static gboolean
+gst_v4l2_encoder_dequeue_sink (GstV4l2Encoder * self)
+{
+ gint ret;
+ struct v4l2_plane planes[GST_VIDEO_MAX_PLANES] = { {0} };
+ struct v4l2_buffer buf = {
+ .type = self->sink_buf_type,
+ .memory = V4L2_MEMORY_MMAP,
+ };
+
+ if (self->mplane) {
+ buf.length = GST_VIDEO_MAX_PLANES;
+ buf.m.planes = planes;
+ }
+
+ ret = ioctl (self->video_fd, VIDIOC_DQBUF, &buf);
+ if (ret < 0) {
+ GST_ERROR_OBJECT (self, "VIDIOC_DQBUF failed: %s", g_strerror (errno));
+ return FALSE;
+ }
+
+ GST_TRACE_OBJECT (self, "Dequeued picture buffer %i", buf.index);
+
+ return TRUE;
+}
+
+static gboolean
+gst_v4l2_encoder_dequeue_src (GstV4l2Encoder * self, guint32 * out_frame_num,
+ guint32 * bytesused)
+{
+ gint ret;
+ struct v4l2_plane planes[GST_VIDEO_MAX_PLANES] = { {0} };
+ struct v4l2_buffer buf = {
+ .type = self->src_buf_type,
+ .memory = V4L2_MEMORY_MMAP,
+ };
+
+ if (self->mplane) {
+ buf.length = GST_VIDEO_MAX_PLANES;
+ buf.m.planes = planes;
+ }
+
+ ret = ioctl (self->video_fd, VIDIOC_DQBUF, &buf);
+ if (ret < 0) {
+ GST_ERROR_OBJECT (self, "VIDIOC_DQBUF failed: %s", g_strerror (errno));
+ return FALSE;
+ }
+
+ *out_frame_num = buf.timestamp.tv_usec;
+ if (self->mplane)
+ *bytesused = buf.m.planes[0].bytesused;
+ else
+ *bytesused = buf.bytesused;
+
+ GST_TRACE_OBJECT (self, "Dequeued bitstream buffer %i, %u bytes used",
+ buf.index, *bytesused);
+
+ return TRUE;
+}
+
+gboolean
+gst_v4l2_encoder_set_controls (GstV4l2Encoder * self, GstV4l2Request * request,
+ struct v4l2_ext_control *control, guint count)
+{
+ gint ret;
+ struct v4l2_ext_controls controls = {
+ .controls = control,
+ .count = count,
+ .request_fd = request ? request->fd : 0,
+ .which = request ? V4L2_CTRL_WHICH_REQUEST_VAL : 0,
+ };
+
+ ret = ioctl (self->video_fd, VIDIOC_S_EXT_CTRLS, &controls);
+ if (ret < 0) {
+ GST_ERROR_OBJECT (self, "VIDIOC_S_EXT_CTRLS failed: %s",
+ g_strerror (errno));
+ return FALSE;
+ }
+
+ return TRUE;
+}
+
+gboolean
+gst_v4l2_encoder_get_controls (GstV4l2Encoder * self, GstV4l2Request * request,
+ struct v4l2_ext_control *control, guint count)
+{
+ gint ret;
+ struct v4l2_ext_controls controls = {
+ .controls = control,
+ .count = count,
+ .request_fd = request ? request->fd : 0,
+ .which = request ? V4L2_CTRL_WHICH_REQUEST_VAL : 0,
+ };
+
+ ret = ioctl (self->video_fd, VIDIOC_G_EXT_CTRLS, &controls);
+ if (ret < 0) {
+ GST_ERROR_OBJECT (self, "VIDIOC_G_EXT_CTRLS failed: %s",
+ g_strerror (errno));
+ return FALSE;
+ }
+
+ return TRUE;
+}
+
+gboolean
+gst_v4l2_encoder_query_control_size (GstV4l2Encoder * self,
+ unsigned int control_id, unsigned int *control_size)
+{
+ gint ret;
+ struct v4l2_query_ext_ctrl control = {
+ .id = control_id,
+ };
+
+ if (control_size)
+ *control_size = 0;
+
+ ret = ioctl (self->video_fd, VIDIOC_QUERY_EXT_CTRL, &control);
+ if (ret < 0)
+ /*
+ * It's not an error if a control is not supported by this driver.
+ * Return false but don't print any error.
+ */
+ return FALSE;
+
+ if (control_size)
+ *control_size = control.elem_size;
+ return TRUE;
+}
+
+void
+gst_v4l2_encoder_install_properties (GObjectClass * gobject_class,
+ gint prop_offset, GstV4l2CodecDevice * device)
+{
+ const gchar *media_device_path = NULL;
+ const gchar *video_device_path = NULL;
+
+ if (device) {
+ media_device_path = device->media_device_path;
+ video_device_path = device->video_device_path;
+ }
+
+ g_object_class_install_property (gobject_class, PROP_MEDIA_DEVICE,
+ g_param_spec_string ("media-device", "Media Device Path",
+ "Path to the media device node", media_device_path,
+ G_PARAM_CONSTRUCT_ONLY | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_VIDEO_DEVICE,
+ g_param_spec_string ("video-device", "Video Device Path",
+ "Path to the video device node", video_device_path,
+ G_PARAM_CONSTRUCT_ONLY | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+}
+
+void
+gst_v4l2_encoder_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstV4l2Encoder *self = GST_V4L2_ENCODER (object);
+
+ switch (prop_id) {
+ case PROP_MEDIA_DEVICE:
+ g_free (self->media_device);
+ self->media_device = g_value_dup_string (value);
+ break;
+ case PROP_VIDEO_DEVICE:
+ g_free (self->video_device);
+ self->video_device = g_value_dup_string (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+void
+gst_v4l2_encoder_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstV4l2Encoder *self = GST_V4L2_ENCODER (object);
+
+ switch (prop_id) {
+ case PROP_MEDIA_DEVICE:
+ g_value_set_string (value, self->media_device);
+ break;
+ case PROP_VIDEO_DEVICE:
+ g_value_set_string (value, self->video_device);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/**
+ * gst_v4l2_encoder_register:
+ * @plugin: a #GstPlugin
+ * @enc_type: A #GType for the codec
+ * @class_init: The #GClassInitFunc for @enc_type
+ * @class_data: (nullable) (transfer full): A #gpointer to pass as class_data, set to @device if NULL
+ * @instance_init: The #GInstanceInitFunc for @enc_type
+ * @element_name_tmpl: A string to use for the first codec found and as a template for the next ones.
+ * @device: (transfer full): A #GstV4l2CodecDevice
+ * @rank: The rank to use for the element
+ * @element_name: (nullable) (out): Set to the name of the registered element
+ *
+ * Registers an encoder element as a subtype of @enc_type for @plugin.
+ * A distinct subtype is created for each subsequent encoder of the
+ * same type.
+ */
+void
+gst_v4l2_encoder_register (GstPlugin * plugin,
+ GType enc_type, GClassInitFunc class_init, gconstpointer class_data,
+ GInstanceInitFunc instance_init, const gchar * element_name_tmpl,
+ GstV4l2CodecDevice * device, guint rank, gchar ** element_name)
+{
+ GTypeQuery type_query;
+ GTypeInfo type_info = { 0, };
+ GType subtype;
+ gchar *type_name;
+
+ g_type_query (enc_type, &type_query);
+ memset (&type_info, 0, sizeof (type_info));
+ type_info.class_size = type_query.class_size;
+ type_info.instance_size = type_query.instance_size;
+ type_info.class_init = class_init;
+ type_info.class_data = class_data;
+ type_info.instance_init = instance_init;
+
+ if (class_data == device)
+ GST_MINI_OBJECT_FLAG_SET (device, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
+
+ /* The first encoder to be registered should use a constant name, like
+ * v4l2slvp8enc; for any additional encoders, we create unique names. Encoder
+ * names may change between boots, so this should help gain stable names for
+ * the most common use cases. SL stands for state-less; we differentiate
+ * from v4l2vp8enc as this element may not have the same properties. */
+ type_name = g_strdup_printf (element_name_tmpl, "");
+
+ if (g_type_from_name (type_name) != 0) {
+ gchar *basename = g_path_get_basename (device->video_device_path);
+ g_free (type_name);
+ type_name = g_strdup_printf (element_name_tmpl, basename);
+ g_free (basename);
+ }
+
+ subtype = g_type_register_static (enc_type, type_name, &type_info, 0);
+
+ if (!gst_element_register (plugin, type_name, rank, subtype)) {
+ GST_WARNING ("Failed to register plugin '%s'", type_name);
+ g_free (type_name);
+ type_name = NULL;
+ }
+
+ if (element_name)
+ *element_name = type_name;
+ else
+ g_free (type_name);
+}
+
+/*
+ * gst_v4l2_encoder_alloc_request:
+ * @self: a #GstV4l2Encoder pointer
+ * @frame_num: Used as a timestamp to identify references
+ * @pic_buf: the #GstBuffer holding the raw picture
+ * @bitstream: the #GstBuffer that holds the encoded data
+ *
+ * Allocate a Linux media request file descriptor. This request wrapper will
+ * hold a reference to the picture buffer to be encoded and to the bitstream
+ * buffer this request will encode into. This is used for transparent
+ * management of the V4L2 queues.
+ *
+ * Returns: a new #GstV4l2Request
+ */
+GstV4l2Request *
+gst_v4l2_encoder_alloc_request (GstV4l2Encoder * self, guint32 frame_num,
+ GstBuffer * pic_buf, GstBuffer * bitstream)
+{
+ GstV4l2Request *request = gst_queue_array_pop_head (self->request_pool);
+ gint ret;
+
+ if (!request) {
+ request = g_new0 (GstV4l2Request, 1);
+
+ ret = ioctl (self->media_fd, MEDIA_IOC_REQUEST_ALLOC, &request->fd);
+ if (ret < 0) {
+ GST_ERROR_OBJECT (self, "MEDIA_IOC_REQUEST_ALLOC failed: %s",
+ g_strerror (errno));
+ return NULL;
+ }
+
+ request->poll = gst_poll_new (FALSE);
+ gst_poll_fd_init (&request->pollfd);
+ request->pollfd.fd = request->fd;
+ gst_poll_add_fd (request->poll, &request->pollfd);
+ gst_poll_fd_ctl_pri (request->poll, &request->pollfd, TRUE);
+ }
+
+ request->encoder = g_object_ref (self);
+ request->bitstream = gst_buffer_ref (bitstream);
+ request->pic_buf = gst_buffer_ref (pic_buf);
+ request->frame_num = frame_num;
+ request->ref_count = 1;
+
+ return request;
+}
+
+/*
+ * gst_v4l2_encoder_alloc_sub_request:
+ * @self: a #GstV4l2Encoder pointer
+ * @prev_request: the #GstV4l2Request this request continues
+ * @bitstream: the #GstBuffer that holds the bitstream data
+ *
+ * Allocate a Linux media request file descriptor. Similar to
+ * gst_v4l2_encoder_alloc_request(), but used when a request is the
+ * continuation of the encoding of the same picture. This is notably the case
+ * for subsequent slices or for the second field of a frame.
+ *
+ * Returns: a new #GstV4l2Request
+ */
+GstV4l2Request *
+gst_v4l2_encoder_alloc_sub_request (GstV4l2Encoder * self,
+ GstV4l2Request * prev_request, GstBuffer * bitstream)
+{
+ GstV4l2Request *request = gst_queue_array_pop_head (self->request_pool);
+ gint ret;
+
+ if (!request) {
+ request = g_new0 (GstV4l2Request, 1);
+
+ ret = ioctl (self->media_fd, MEDIA_IOC_REQUEST_ALLOC, &request->fd);
+ if (ret < 0) {
+ GST_ERROR_OBJECT (self, "MEDIA_IOC_REQUEST_ALLOC failed: %s",
+ g_strerror (errno));
+ return NULL;
+ }
+
+ request->poll = gst_poll_new (FALSE);
+ gst_poll_fd_init (&request->pollfd);
+ request->pollfd.fd = request->fd;
+ gst_poll_add_fd (request->poll, &request->pollfd);
+ gst_poll_fd_ctl_pri (request->poll, &request->pollfd, TRUE);
+ }
+
+ request->encoder = g_object_ref (self);
+ request->bitstream = gst_buffer_ref (bitstream);
+ request->pic_buf = gst_buffer_ref (prev_request->pic_buf);
+ request->frame_num = prev_request->frame_num;
+ request->sub_request = TRUE;
+ request->ref_count = 1;
+
+ return request;
+}
+
+GstV4l2Request *
+gst_v4l2_encoder_request_ref (GstV4l2Request * request)
+{
+ request->ref_count++;
+ return request;
+}
+
+static void
+gst_v4l2_encoder_request_free (GstV4l2Request * request)
+{
+ GstV4l2Encoder *encoder = request->encoder;
+
+ request->encoder = NULL;
+ close (request->fd);
+ gst_poll_free (request->poll);
+ g_free (request);
+
+ if (encoder)
+ g_object_unref (encoder);
+}
+
+void
+gst_v4l2_encoder_request_unref (GstV4l2Request * request)
+{
+ GstV4l2Encoder *encoder = request->encoder;
+ gint ret;
+
+ g_return_if_fail (request->ref_count > 0);
+
+ if (--request->ref_count > 0)
+ return;
+
+ g_clear_pointer (&request->bitstream, gst_buffer_unref);
+ g_clear_pointer (&request->pic_buf, gst_buffer_unref);
+ request->frame_num = G_MAXUINT32;
+ request->failed = FALSE;
+ request->hold_pic_buf = FALSE;
+ request->sub_request = FALSE;
+
+ if (request->pending) {
+ gint idx;
+
+ GST_DEBUG_OBJECT (encoder, "Freeing pending request %i.", request->fd);
+
+ idx = gst_queue_array_find (encoder->pending_requests, NULL, request);
+ if (idx >= 0)
+ gst_queue_array_drop_element (encoder->pending_requests, idx);
+
+ gst_v4l2_encoder_request_free (request);
+ return;
+ }
+
+ GST_TRACE_OBJECT (encoder, "Recycling request %i.", request->fd);
+
+ ret = ioctl (request->fd, MEDIA_REQUEST_IOC_REINIT, NULL);
+ if (ret < 0) {
+ GST_ERROR_OBJECT (request->encoder, "MEDIA_REQUEST_IOC_REINIT failed: %s",
+ g_strerror (errno));
+ gst_v4l2_encoder_request_free (request);
+ return;
+ }
+
+ gst_queue_array_push_tail (encoder->request_pool, request);
+ g_clear_object (&request->encoder);
+}
+
+gboolean
+gst_v4l2_encoder_request_queue (GstV4l2Request * request, guint flags)
+{
+ GstV4l2Encoder *encoder = request->encoder;
+ gint ret;
+
+ GST_TRACE_OBJECT (encoder, "Queuing request %i.", request->fd);
+
+ /* this would lead to stalls if we tried to use this feature and it wasn't
+ * supported. */
+ if ((flags & V4L2_BUF_FLAG_M2M_HOLD_CAPTURE_BUF)
+ && !encoder->supports_holding_capture) {
+ GST_ERROR_OBJECT (encoder,
+ "Driver does not support holding capture buffer.");
+ return FALSE;
+ }
+
+ if (!gst_v4l2_encoder_queue_sink_buffer (encoder, request,
+ request->pic_buf, request->frame_num, flags)) {
+ GST_ERROR_OBJECT (encoder, "Driver did not accept the picture buffer.");
+ return FALSE;
+ }
+
+ if (!gst_v4l2_encoder_queue_src_buffer (encoder, request, request->bitstream)) {
+ GST_ERROR_OBJECT (encoder, "Driver did not accept the bitstream buffer.");
+ return FALSE;
+ }
+
+ ret = ioctl (request->fd, MEDIA_REQUEST_IOC_QUEUE, NULL);
+ if (ret < 0) {
+ GST_ERROR_OBJECT (encoder, "MEDIA_REQUEST_IOC_QUEUE, failed: %s",
+ g_strerror (errno));
+ return FALSE;
+ }
+
+ if (flags & V4L2_BUF_FLAG_M2M_HOLD_CAPTURE_BUF)
+ request->hold_pic_buf = TRUE;
+
+ request->pending = TRUE;
+ gst_queue_array_push_tail (encoder->pending_requests,
+ gst_v4l2_encoder_request_ref (request));
+
+ if (gst_queue_array_get_length (encoder->pending_requests) > 1) {
+ GstV4l2Request *pending_req;
+ guint32 pending_bytesused = 0;
+
+ pending_req = gst_queue_array_peek_head (encoder->pending_requests);
+ gst_v4l2_encoder_request_set_done (pending_req, &pending_bytesused);
+ }
+
+ return TRUE;
+}
+
+gint
+gst_v4l2_encoder_request_set_done (GstV4l2Request * request,
+ guint32 * bytesused)
+{
+ GstV4l2Encoder *encoder = request->encoder;
+ GstV4l2Request *pending_req = NULL;
+ gint ret;
+
+ *bytesused = 0;
+ if (!request->pending)
+ return 1;
+
+ GST_DEBUG_OBJECT (encoder, "Waiting for request %i to complete.",
+ request->fd);
+
+ ret = gst_poll_wait (request->poll, GST_SECOND);
+ if (ret == 0) {
+ GST_WARNING_OBJECT (encoder, "Request %i took too long.", request->fd);
+ return 0;
+ }
+
+ if (ret < 0) {
+ GST_WARNING_OBJECT (encoder, "Request %i error: %s (%i)",
+ request->fd, g_strerror (errno), errno);
+ return ret;
+ }
+
+ while ((pending_req = gst_queue_array_pop_head (encoder->pending_requests))) {
+ gst_v4l2_encoder_dequeue_sink (encoder);
+
+ if (!pending_req->hold_pic_buf) {
+ guint32 frame_num = G_MAXUINT32;
+ if (!gst_v4l2_encoder_dequeue_src (encoder, &frame_num, bytesused)) {
+ pending_req->failed = TRUE;
+ } else if (frame_num != pending_req->frame_num) {
+ GST_WARNING_OBJECT (encoder,
+ "Requested frame %u, but driver returned frame %u.",
+ pending_req->frame_num, frame_num);
+ pending_req->failed = TRUE;
+ }
+ }
+
+ pending_req->pending = FALSE;
+ gst_v4l2_encoder_request_unref (pending_req);
+
+ if (pending_req == request)
+ break;
+ }
+
+ /* Pending request must be in the pending request list */
+ g_assert (pending_req == request);
+
+ return ret;
+}
+
+gboolean
+gst_v4l2_codec_vp8_enc_get_qp_range (GstV4l2Encoder * self,
+ guint * qp_min, guint * qp_max)
+{
+ gint ret;
+ struct v4l2_query_ext_ctrl control = {
+ .id = V4L2_CID_STATELESS_VP8_ENCODE_QP,
+ };
+
+ *qp_max = 127;
+ *qp_min = 0;
+
+ ret = ioctl (self->video_fd, VIDIOC_QUERY_EXT_CTRL, &control);
+ if (ret < 0) {
+ GST_DEBUG_OBJECT (self, "Unable to retrieve encoder QP range, using defaults");
+ return FALSE;
+ }
+
+ *qp_max = control.maximum;
+ *qp_min = control.minimum;
+ GST_DEBUG_OBJECT (self, "encoder qp min %d qp max %d", *qp_min, *qp_max);
+
+ return TRUE;
+}
diff --git a/sys/v4l2codecs/gstv4l2encoder.h b/sys/v4l2codecs/gstv4l2encoder.h
new file mode 100644
index 0000000..7ff01a9
--- /dev/null
+++ b/sys/v4l2codecs/gstv4l2encoder.h
@@ -0,0 +1,139 @@
+/* GStreamer
+ * Copyright (C) 2022 Benjamin Gaignard <benjamin.gaignard@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_V4L2_ENCODER_H__
+#define __GST_V4L2_ENCODER_H__
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+
+#include "gstv4l2codecdevice.h"
+#include "linux/videodev2.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_V4L2_ENCODER gst_v4l2_encoder_get_type ()
+G_DECLARE_FINAL_TYPE (GstV4l2Encoder, gst_v4l2_encoder, GST, V4L2_ENCODER, GstObject);
+
+typedef struct _GstV4l2Request GstV4l2Request;
+
+GstV4l2Encoder * gst_v4l2_encoder_new (GstV4l2CodecDevice * device);
+
+guint gst_v4l2_encoder_get_version (GstV4l2Encoder * self);
+
+gboolean gst_v4l2_encoder_open (GstV4l2Encoder * self);
+
+gboolean gst_v4l2_encoder_close (GstV4l2Encoder * self);
+
+gboolean gst_v4l2_encoder_streamon (GstV4l2Encoder * self,
+ GstPadDirection direction);
+
+gboolean gst_v4l2_encoder_streamoff (GstV4l2Encoder * self,
+ GstPadDirection direction);
+
+gboolean gst_v4l2_encoder_flush (GstV4l2Encoder * self);
+
+gboolean gst_v4l2_encoder_enum_sink_fmt (GstV4l2Encoder * self,
+ gint i, guint32 * out_fmt);
+
+GstCaps * gst_v4l2_encoder_list_sink_formats (GstV4l2Encoder * self);
+
+gboolean gst_v4l2_encoder_select_sink_format (GstV4l2Encoder * self,
+ GstVideoInfo * in, GstVideoInfo * out);
+
+gboolean gst_v4l2_encoder_enum_src_formats (GstV4l2Encoder * self,
+ gint i, guint32 * out_fmt);
+
+gboolean gst_v4l2_encoder_set_src_fmt (GstV4l2Encoder * self, GstVideoInfo * info, guint32 pix_fmt);
+
+gint gst_v4l2_encoder_request_buffers (GstV4l2Encoder * self,
+ GstPadDirection direction,
+ guint num_buffers);
+
+gboolean gst_v4l2_encoder_export_buffer (GstV4l2Encoder * self,
+ GstPadDirection direction,
+ gint index,
+ gint * fds,
+ gsize * sizes,
+ gsize * offsets,
+ guint *num_fds);
+
+gboolean gst_v4l2_encoder_set_controls (GstV4l2Encoder * self,
+ GstV4l2Request * request,
+ struct v4l2_ext_control *control,
+ guint count);
+
+gboolean gst_v4l2_encoder_get_controls (GstV4l2Encoder * self,
+ GstV4l2Request * request,
+ struct v4l2_ext_control * control,
+ guint count);
+
+gboolean gst_v4l2_encoder_query_control_size (GstV4l2Encoder * self,
+ unsigned int control_id,
+ unsigned int *control_size);
+
+void gst_v4l2_encoder_install_properties (GObjectClass * gobject_class,
+ gint prop_offset,
+ GstV4l2CodecDevice * device);
+
+void gst_v4l2_encoder_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+
+void gst_v4l2_encoder_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+void gst_v4l2_encoder_register (GstPlugin * plugin,
+ GType enc_type,
+ GClassInitFunc class_init,
+ gconstpointer class_data,
+ GInstanceInitFunc instance_init,
+ const gchar *element_name_tmpl,
+ GstV4l2CodecDevice * device,
+ guint rank,
+ gchar ** element_name);
+
+GstV4l2Request *gst_v4l2_encoder_alloc_request (GstV4l2Encoder * self,
+ guint32 frame_num,
+ GstBuffer * pic_buf,
+ GstBuffer * bitstream);
+
+GstV4l2Request *gst_v4l2_encoder_alloc_ro_request (GstV4l2Encoder * self);
+
+GstV4l2Request *gst_v4l2_encoder_alloc_sub_request (GstV4l2Encoder * self,
+ GstV4l2Request * prev_request,
+ GstBuffer * bitstream);
+
+GstV4l2Request * gst_v4l2_encoder_request_ref (GstV4l2Request * request);
+
+void gst_v4l2_encoder_request_unref (GstV4l2Request * request);
+void gst_v4l2_encoder_ro_request_unref (GstV4l2Request * request);
+
+
+gboolean gst_v4l2_encoder_request_queue (GstV4l2Request * request,
+ guint flags);
+
+gint gst_v4l2_encoder_request_set_done (GstV4l2Request * request, guint32 * bytesused);
+
+gboolean gst_v4l2_encoder_request_failed (GstV4l2Request * request);
+
+gboolean gst_v4l2_codec_vp8_enc_get_qp_range (GstV4l2Encoder * self, guint * qp_min, guint * qp_max);
+
+G_END_DECLS
+
+#endif /* __GST_V4L2_ENCODER_H__ */
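The registration helper declared above mirrors gst_v4l2_decoder_register(). A rough sketch of how a later patch in the series might register a concrete encoder element at plugin load time; the GST_TYPE_V4L2_CODEC_VP8_ENC type, its subclass/subinit functions and the element name template are hypothetical placeholders.

/* Hypothetical registration sketch; only gst_v4l2_encoder_register () and
 * GstV4l2CodecDevice exist in this patch, the vp8 symbols stand in for an
 * encoder subclass added later in the series. */
static void
register_vp8_encoder (GstPlugin * plugin, GstV4l2CodecDevice * device,
    guint rank)
{
  gchar *element_name = NULL;

  gst_v4l2_encoder_register (plugin, GST_TYPE_V4L2_CODEC_VP8_ENC,
      (GClassInitFunc) gst_v4l2_codec_vp8_enc_subclass_init,
      gst_mini_object_ref (GST_MINI_OBJECT (device)),
      (GInstanceInitFunc) gst_v4l2_codec_vp8_enc_subinit,
      "v4l2slvp8%senc", device, rank, &element_name);

  /* element_name is "v4l2slvp8enc" for the first device, a device-specific
   * name for any additional ones, or NULL if registration failed. */
  g_free (element_name);
}
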
diff --git a/sys/v4l2codecs/meson.build b/sys/v4l2codecs/meson.build
index 836dc53..53e8923 100644
--- a/sys/v4l2codecs/meson.build
+++ b/sys/v4l2codecs/meson.build
@@ -11,6 +11,7 @@ v4l2codecs_sources = [
'gstv4l2decoder.c',
'gstv4l2format.c',
'gstv4l2codecalphadecodebin.c',
+ 'gstv4l2encoder.c',
]
libgudev_dep = dependency('gudev-1.0', required: get_option('v4l2codecs'))
--
2.25.1