Hello,
In a system I’m building with a Jetson Nano and an IMX219 camera, I capture frames with GStreamer and process them with OpenCV. In the on_new_sample callback I pull the captured frame from the appsink and hand it to OpenCV for processing. Then, in another function (displayFrame), I convert the processed frame back into a GstBuffer and push it to an appsrc so it can be displayed on the screen. However, when I push the frame to the appsrc pipeline I get the errors “nvbuf_utils: nvbuffer Payload Type not supported” and “gst_nvvconv_transform: NvBufferGetParams Failed”. How can I resolve this issue?
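For reference, the two pipelines the code below builds are roughly equivalent to these gst-launch-1.0 lines (note that I do not set any caps on the appsrc):
nvarguscamerasrc sensor-id=0 ! 'video/x-raw(memory:NVMM), width=1280, height=720, format=NV12, framerate=30/1' ! nvvideoconvert ! appsink
appsrc ! nvvidconv ! autovideosink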
Thank you.
Here is my code:
/*
capsfilter1 = gst_element_factory_make("capsfilter", "cfilter1");
tee0 = gst_element_factory_make("tee", "tee0");
tee1 = gst_element_factory_make("tee", "tee1");
q0 = gst_element_factory_make("queue", "q0");
q1 = gst_element_factory_make("queue", "q1");
q2 = gst_element_factory_make("queue", "q2");
q3 = gst_element_factory_make("queue", "q3");
compositor = gst_element_factory_make("compositor", "compositor0");
GstElement *testsource = gst_element_factory_make("videotestsrc", "src_test");
GstElement *videorate = gst_element_factory_make("videorate", "vrt");
*/
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <gst/gst.h>
#include <gst/video/videooverlay.h>
#include <gst/app/gstappsink.h>
#include <gst/app/gstappsrc.h>
#include <gdk/gdkx.h>
#include <gtk/gtk.h>
#include <thread>
#include <mutex>
#include <X11/Xlib.h>
#include <iostream>
// Add any other required headers here
static int frame_counter = 0;
static int frameCounter = 0;
cv::Mat next_frame = cv::Mat::zeros(720, 1280, CV_8UC3); // rows = height, cols = width
std::mutex frameMutex;
GMainLoop *loop = nullptr;
GstFlowReturn ret;
GstElement *pipeline, *source, *converter0, *sink0, *capsfilter0, *sink1, *pipelineFromOpenCV, *sourceFromOpenCV, *converter1, *capsfilter1;
void displayFrame();
static gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data)
{
switch (GST_MESSAGE_TYPE(msg))
{
case GST_MESSAGE_EOS:
g_print("End of stream\n");
g_main_loop_quit(loop);
break;
case GST_MESSAGE_ERROR:
{
gchar *debug;
GError *error;
gst_message_parse_error(msg, &error, &debug);
g_free(debug);
g_printerr("Error: %s\n", error->message);
g_error_free(error);
g_main_loop_quit(loop);
break;
}
default:
break;
}
return TRUE;
}
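// appsink "new-sample" callback: pulls the captured frame, wraps it in a cv::Mat,
// draws a test rectangle on it, and hands it off to displayFrame() on a detached thread.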
static GstFlowReturn on_new_sample(GstElement *sink, gpointer user_data)
{
frame_counter++; // Increment the counter on each call
g_print("frame_counter : %d\n", frame_counter);
GstSample *sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));
if (sample)
{
GstCaps *caps = gst_sample_get_caps(sample);
GstStructure *structure = gst_caps_get_structure(caps, 0);
gint width, height;
gst_structure_get_int(structure, "width", &width);
gst_structure_get_int(structure, "height", &height);
GstBuffer *buffer = gst_sample_get_buffer(sample);
GstMapInfo map;
gst_buffer_map(buffer, &map, GST_MAP_READ);
cv::Mat current_frame = cv::Mat(height, width, CV_8UC3, map.data);
// cv::cvtColor(cv::Mat(height + height / 2, width, CV_8UC1, map.data), current_frame, cv::COLOR_YUV2BGR_NV12);
// std::cout << "frame1 rows: " << current_frame.rows << ", cols: " << current_frame.cols << std::endl;
g_print("Frame1 rows: %d, cols: %d\n", current_frame.rows, current_frame.cols);
std::cout << "map.data: " << static_cast<void *>(map.data) << std::endl;
if (!current_frame.empty())
{
// Copy directly into the shared frame instead of cloning
frameMutex.lock();
current_frame.copyTo(next_frame);
frameMutex.unlock();
// std::cout << "frame2 rows: " << next_frame.rows << ", cols: " << next_frame.cols << std::endl;
g_print("Frame2 rows %d, cols: %d\n", next_frame.rows, next_frame.cols);
if (next_frame.data == nullptr)
{
// std::cerr << "Error: next_frame.data is nullptr after cloning." << std::endl;
g_printerr("Error: next_frame.data is nullptr after cloning.");
}
cv::Point pt1(100, 100);
cv::Point pt2(300, 300);
cv::Scalar color(0, 255, 0); // Green
cv::rectangle(next_frame, pt1, pt2, color, 2);
g_print("Frame Received - Total Frames: %d\n", frame_counter);
std::thread displayThread(displayFrame);
displayThread.detach();
}
gst_buffer_unmap(buffer, &map);
gst_sample_unref(sample);
}
return GST_FLOW_OK;
}
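// Copies the latest processed frame into a GstBuffer and pushes it into the appsrc
// of the display pipeline (pipelineFromOpenCV).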
void displayFrame()
{
g_print("Inside displayFrame\n");
cv::Mat readyFrame = cv::Mat::zeros(720, 1280, CV_8UC3); // rows = height, cols = width
cv::Mat nv12_frame;
frameCounter++;
if (!next_frame.empty())
{
g_print("Here: \n");
frameMutex.lock();
next_frame.copyTo(readyFrame);
frameMutex.unlock();
g_print("Coming Frame: %d\n", frameCounter);
// Copy the frame into a newly allocated GstBuffer so the buffer owns its data
// (readyFrame is a local cv::Mat that is released when this function returns).
gsize bufferSize = readyFrame.total() * readyFrame.elemSize();
GstBuffer *buffer = gst_buffer_new_allocate(nullptr, bufferSize, nullptr);
gst_buffer_fill(buffer, 0, readyFrame.data, bufferSize);
g_signal_emit_by_name(sourceFromOpenCV, "push-buffer", buffer, &ret);
gst_buffer_unref(buffer);
}
}
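// main() builds two pipelines: a capture pipeline
// (nvarguscamerasrc ! capsfilter ! nvvideoconvert ! appsink) and a display pipeline
// (appsrc ! nvvidconv ! autovideosink), then runs both from a single GMainLoop.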
int main(int argc, char *argv[])
{
gst_init(&argc, &argv);
pipeline = gst_element_factory_make("pipeline", "pipeline0");
pipelineFromOpenCV = gst_element_factory_make("pipeline", "pipeline1");
sourceFromOpenCV = gst_element_factory_make("appsrc", "src1");
source = gst_element_factory_make("nvarguscamerasrc", "src");
sink0 = gst_element_factory_make("autovideosink", "sink0");//autovideosink ximagesink nvoverlaysink
sink1 = gst_element_factory_make("appsink", "sink1");
converter0 = gst_element_factory_make("nvvideoconvert", "conv0"); //nvvidconv videoconvert nvvideoconvert
converter1 = gst_element_factory_make("nvvidconv", "conv1");
capsfilter0 = gst_element_factory_make("capsfilter", "cfilter0");
capsfilter1 = gst_element_factory_make("capsfilter", "cfilter1");
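// Configure caps and appsrc/appsink behaviour, and connect the callbacks.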
gst_app_src_set_emit_signals(GST_APP_SRC(sourceFromOpenCV), true);
g_object_set(G_OBJECT(capsfilter0),
"caps",
gst_caps_from_string("video/x-raw(memory:NVMM), width=1280, height=720, format=NV12, framerate=30/1"),
nullptr);
// g_object_set(G_OBJECT(capsfilter1),
// "caps",
// gst_caps_from_string("video/x-raw(memory:NVMM), width=1280, height=720, format=NV12, framerate=30/1"),
// nullptr);
g_object_set(G_OBJECT(source), "sensor-id", 0, "do-timestamp", TRUE, NULL);
g_object_set(G_OBJECT(sink1), "async", FALSE, NULL);
g_object_set(sink1, "emit-signals", TRUE, "sync", false, "max-buffers", 30, "drop", true, nullptr);
g_signal_connect(sink1, "new-sample", G_CALLBACK(on_new_sample), nullptr);
g_signal_connect(sourceFromOpenCV, "need-data", G_CALLBACK(displayFrame), NULL);
if (!pipeline || !source || !capsfilter0 || !converter0 || !sink1)
{
g_printerr("Not all elements could be created. Exiting.\n");
return -1;
}
if (!pipelineFromOpenCV || !sourceFromOpenCV || !converter1 || !sink0)
{
g_printerr("Not all elements could be created. Exiting1.\n");
return -1;
}
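// Add the elements to their pipelines and link them.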
gst_bin_add_many(GST_BIN(pipeline), source, capsfilter0, sink1, converter0, NULL);
gst_bin_add_many(GST_BIN(pipelineFromOpenCV), sourceFromOpenCV, converter1, sink0, NULL);
if (!gst_element_link_many(source, capsfilter0, converter0, sink1, NULL))
{
g_printerr("Link problem0.\n");
return -1;
}
if (!gst_element_link_many(sourceFromOpenCV, converter1, sink0, NULL))
{
g_printerr("Link problem1.\n");
return -1;
}
GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
gst_bus_add_watch(bus, (GstBusFunc)bus_call, NULL);
gst_object_unref(bus);
bus = gst_pipeline_get_bus(GST_PIPELINE(pipelineFromOpenCV));
gst_bus_add_watch(bus, (GstBusFunc)bus_call, NULL);
gst_object_unref(bus);
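// Set each pipeline to PLAYING; retry while the state change has not yet reported SUCCESS.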
GstStateChangeReturn ret;
do
{
ret = gst_element_set_state(pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE)
{
g_printerr("Unable to set the pipeline to the playing state. Exiting.\n");
gst_object_unref(pipeline);
return -1;
}
} while (ret != GST_STATE_CHANGE_SUCCESS);
do
{
ret = gst_element_set_state(pipelineFromOpenCV, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE)
{
g_printerr("Unable to set the pipeline to the playing state. Exiting1.\n");
gst_object_unref(pipelineFromOpenCV);
return -1;
}
} while (ret != GST_STATE_CHANGE_SUCCESS);
loop = g_main_loop_new(NULL, FALSE);
g_main_loop_run(loop);
gst_element_set_state(pipeline, GST_STATE_NULL);
gst_object_unref(pipeline);
gst_element_set_state(pipelineFromOpenCV, GST_STATE_NULL);
gst_object_unref(pipelineFromOpenCV);
g_main_loop_unref(loop);
return 0;
}
And here is the GST_DEBUG log:
debug_output.txt (5.6 MB)