Guest
Schieben Sie OpenCV mat in eine DeepStream-Pipeline
Post
by Guest » 20 Jan 2025, 18:01
Ich würde gerne einen Videostream mit OpenCV öffnen und Bild für Bild in eine DeepStream-Pipeline pushen, um mit TensorRT eine Inferenz mit dem YOLOv3-Modell durchzuführen, weiß aber nicht, wie das funktioniert.
Ich versuche, den Anweisungen zu folgen, die ich hier gefunden habe, aber immer noch nichts...
Das ist mein Code:
Code: Select all
#include <string.h>
#include <glib.h>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/imgcodecs.hpp>
/* Main loop shared with cb_need_data so the callback can stop the pipeline. */
static GMainLoop *loop;

/*
 * "need-data" callback for appsrc.
 *
 * Loads a frame with OpenCV, converts it to match the caps negotiated on
 * appsrc (video/x-raw, format=RGB, 640x360, 1/1 fps) and pushes it into
 * the pipeline.
 *
 * appsrc      - the appsrc element requesting data
 * unused_size - suggested byte count from appsrc (ignored)
 * user_data   - unused
 */
static void
cb_need_data (GstElement *appsrc,
              guint unused_size,
              gpointer user_data)
{
  static GstClockTime timestamp = 0;
  GstFlowReturn ret;
  GstMapInfo map;

  /* NOTE(review): re-reading the JPEG on every need-data call works for a
   * demo but is wasteful; for a real stream use cv::VideoCapture once. */
  cv::Mat frame = cv::imread ("cat.jpg", cv::IMREAD_COLOR);
  if (frame.empty ()) {
    g_printerr ("cb_need_data: could not read cat.jpg\n");
    g_main_loop_quit (loop);
    return;
  }

  /* The buffer content must match the appsrc caps: resize to 640x360 and
   * convert BGR->RGB.  The original code converted to YUV against RGB caps
   * and pushed the image at its native size, so downstream negotiation and
   * interpretation of the data were both wrong.  (This also drops the
   * deprecated cv::Mat -> IplImage conversion, removed in OpenCV 4.) */
  cv::resize (frame, frame, cv::Size (640, 360));
  cv::cvtColor (frame, frame, cv::COLOR_BGR2RGB);

  const gsize size = (gsize) frame.total () * frame.elemSize ();
  GstBuffer *buffer = gst_buffer_new_allocate (NULL, size, NULL);

  gst_buffer_map (buffer, &map, GST_MAP_WRITE);
  if (frame.isContinuous ()) {
    memcpy (map.data, frame.data, size);
  } else {
    /* Copy row by row so any padding in the Mat's row stride is skipped. */
    guchar *dst = map.data;
    const size_t row_bytes = (size_t) frame.cols * frame.elemSize ();
    for (int r = 0; r < frame.rows; r++) {
      memcpy (dst, frame.ptr (r), row_bytes);
      dst += row_bytes;
    }
  }
  /* Was missing in the original: unmap before handing the buffer off. */
  gst_buffer_unmap (buffer, &map);

  GST_BUFFER_PTS (buffer) = timestamp;
  GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 1);
  timestamp += GST_BUFFER_DURATION (buffer);

  /* The "push-buffer" action signal does NOT take ownership of the buffer
   * (it takes its own ref), so we must drop ours afterwards; the original
   * leaked one buffer per frame. */
  g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
  gst_buffer_unref (buffer);

  if (ret != GST_FLOW_OK) {
    g_print ("quit\n");
    /* Downstream refused the buffer: stop pushing and exit the loop. */
    g_main_loop_quit (loop);
  }
}
/*
 * Builds appsrc -> videoconvert -> nvstreammux -> nveglglessink and runs it.
 *
 * NOTE(review): a full DeepStream inference pipeline additionally needs
 * nvvideoconvert (to get the data into NVMM memory) and nvinfer between
 * videoconvert and nvstreammux — confirm against the DeepStream samples.
 */
gint
main (gint argc,
      gchar *argv[])
{
  GstElement *pipeline, *appsrc, *conv, *streammux, *sink;
  GstPad *mux_sinkpad, *conv_srcpad;

  /* init GStreamer */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* setup pipeline */
  pipeline  = gst_pipeline_new ("pipeline");
  appsrc    = gst_element_factory_make ("appsrc", "source");
  conv      = gst_element_factory_make ("videoconvert", "conv");
  streammux = gst_element_factory_make ("nvstreammux", "stream-muxer");
  sink      = gst_element_factory_make ("nveglglessink", "nvvideo-renderer");
  if (!pipeline || !appsrc || !conv || !streammux || !sink) {
    g_printerr ("failed to create one or more pipeline elements\n");
    return -1;
  }

  /* Caps must match what cb_need_data produces (RGB, 640x360, 1 fps). */
  g_object_set (G_OBJECT (appsrc), "caps",
      gst_caps_new_simple ("video/x-raw",
          "format", G_TYPE_STRING, "RGB",
          "width", G_TYPE_INT, 640,
          "height", G_TYPE_INT, 360,
          "framerate", GST_TYPE_FRACTION, 1, 1,
          NULL), NULL);

  /* nvstreammux requires an output resolution and batch size to be set. */
  g_object_set (G_OBJECT (streammux),
      "width", 640,
      "height", 360,
      "batch-size", 1,
      NULL);

  gst_bin_add_many (GST_BIN (pipeline), appsrc, conv, streammux, sink, NULL);

  /* nvstreammux only exposes request sink pads ("sink_%u"), so the original
   * gst_element_link_many() through it silently failed and the pipeline
   * never produced data.  Request sink_0 and link it explicitly. */
  if (!gst_element_link (appsrc, conv)) {
    g_printerr ("failed to link appsrc -> videoconvert\n");
    return -1;
  }
  mux_sinkpad = gst_element_get_request_pad (streammux, "sink_0");
  conv_srcpad = gst_element_get_static_pad (conv, "src");
  if (!mux_sinkpad || !conv_srcpad ||
      gst_pad_link (conv_srcpad, mux_sinkpad) != GST_PAD_LINK_OK) {
    g_printerr ("failed to link videoconvert -> nvstreammux\n");
    return -1;
  }
  gst_object_unref (conv_srcpad);
  gst_object_unref (mux_sinkpad);
  if (!gst_element_link (streammux, sink)) {
    g_printerr ("failed to link nvstreammux -> sink\n");
    return -1;
  }

  /* setup appsrc: live stream, timestamps in time format */
  g_object_set (G_OBJECT (appsrc),
      "stream-type", 0,
      "format", GST_FORMAT_TIME, NULL);
  g_signal_connect (appsrc, "need-data", G_CALLBACK (cb_need_data), NULL);

  /* play */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_main_loop_run (loop);

  /* clean up */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (GST_OBJECT (pipeline));
  g_main_loop_unref (loop);
  return 0;
}
Ich bin ein absoluter Anfänger. Wenn jemand etwas Code zeigen kann, wird es viel besser sein.
Danke.
1737392504
Guest
Ich würde gerne einen Videostream von OpenCv öffnen und Bild für Bild in eine DeepStream-Pipeline pushen, um mit tesornRT einen Rückschluss auf das Yolov3-Modell zu ziehen, weiß aber nicht, wie das funktioniert. Ich versuche, den Anweisungen zu folgen, die ich hier gefunden habe, aber immer noch nichts... Das ist mein Code: [code]#include #include #include #include #include #include #include static GMainLoop *loop; static void cb_need_data (GstElement *appsrc, guint unused_size, gpointer user_data) { static gboolean white = FALSE; static GstClockTime timestamp = 0; guint size,depth,height,width,step,channels; GstFlowReturn ret ; IplImage* img; guchar *data1; GstMapInfo map; cv::Mat imgMat = imread("cat.jpg",cv::IMREAD_COLOR); cvtColor(imgMat,imgMat,cv::COLOR_BGR2YUV); IplImage imgIpl = imgMat; img = &imgIpl; height = img->height; width = img->width; step = img->widthStep; channels = img->nChannels; depth = img->depth; data1 = (guchar *)img->imageData; size = height*width*channels; GstBuffer *buffer = NULL;//gst_buffer_new_allocate (NULL, size, NULL); g_print("frame_height: %d \n",img->height); g_print("frame_width: %d \n",img->width); g_print("frame_channels: %d \n",img->nChannels); g_print("frame_size: %d \n",height*width*channels); buffer = gst_buffer_new_allocate (NULL, size, NULL); gst_buffer_map (buffer, &map, GST_MAP_WRITE); memcpy( (guchar *)map.data, data1, gst_buffer_get_size( buffer ) ); /* this makes the image black/white */ //gst_buffer_memset (buffer, 0, white ? 
0xff : 0x0, size); white = !white; GST_BUFFER_PTS (buffer) = timestamp; GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 1); timestamp += GST_BUFFER_DURATION (buffer); //gst_app_src_push_buffer ((GstAppSrc *)appsrc, buffer); g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret); if (ret != GST_FLOW_OK) { g_print("quit"); /* something wrong, stop pushing */ g_main_loop_quit (loop); } //g_print("return"); } gint main (gint argc, gchar *argv[]) { GstElement *pipeline, *appsrc, *conv, *videosink, *sink,*nvosd,*streammux; /* init GStreamer */ gst_init (&argc, &argv); loop = g_main_loop_new (NULL, FALSE); /* setup pipeline */ pipeline = gst_pipeline_new ("pipeline"); appsrc = gst_element_factory_make ("appsrc", "source"); conv = gst_element_factory_make ("videoconvert", "conv"); streammux = gst_element_factory_make ("nvstreammux", "stream-muxer"); sink = gst_element_factory_make ("nveglglessink", "nvvideo-renderer"); //videosink = gst_element_factory_make("appsink","app-sink"); /* setup */ g_object_set (G_OBJECT (appsrc), "caps", gst_caps_new_simple ("video/x-raw", "format", G_TYPE_STRING, "RGB", "width", G_TYPE_INT, 640, "height", G_TYPE_INT, 360, "framerate", GST_TYPE_FRACTION, 1, 1, NULL), NULL); gst_bin_add_many (GST_BIN (pipeline), appsrc, conv,streammux,sink,NULL); gst_element_link_many (appsrc,conv,streammux,sink ,NULL); //g_object_set (videosink, "device", "/dev/video0", NULL); /* setup appsrc */ g_object_set (G_OBJECT (appsrc), "stream-type", 0, "format", GST_FORMAT_TIME, NULL); g_signal_connect (appsrc, "need-data", G_CALLBACK (cb_need_data), NULL); /* play */ gst_element_set_state (pipeline, GST_STATE_PLAYING); g_main_loop_run (loop); /* clean up */ gst_element_set_state (pipeline, GST_STATE_NULL); gst_object_unref (GST_OBJECT (pipeline)); g_main_loop_unref (loop); return 0; } [/code] Ich bin ein absoluter Anfänger. Wenn jemand etwas Code zeigen kann, wird es viel besser sein. Danke.