realtime v4l2src for deepstream test1 c application does not work

Posted by 大城市里の小女人 on 2020-03-05 04:57:07

Question


My pipeline is shown below. It works with gst_parse_launch in the C code that follows, but I wanted to build the pipeline dynamically instead. With the dynamic version I get no error, yet I also don't get the desired output.

 gst-launch-1.0 v4l2src ! 'video/x-raw,format=(string)YUY2' ! nvvidconv ! 'video/x-raw(memory:NVMM),format=(string)NV12' ! nvvidconv ! 'video/x-raw,format=(string)NV12' ! nvvideoconvert ! 'video/x-raw(memory:NVMM),format=(string)NV12' ! mux.sink_0 nvstreammux live-source=1 name=mux batch-size=1 width=640 height=480 ! nvinfer config-file-path=/opt/nvidia/deepstream/deepstream-4.0/sources/apps/sample_apps/deepstream-test1/dstest1_pgie_config.txt batch-size=1 ! nvmultistreamtiler rows=1 columns=1 width=640 height=480 ! nvvideoconvert ! nvdsosd ! nvegltransform ! nveglglessink 

and it runs perfectly with gst_parse_launch in this C application:

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Build the pipeline */
  pipeline =
      gst_parse_launch
    ("v4l2src ! video/x-raw,format=(string)YUY2 ! nvvidconv ! video/x-raw(memory:NVMM),format=(string)NV12 ! nvvidconv ! video/x-raw,format=(string)NV12 ! nvvideoconvert ! video/x-raw(memory:NVMM),format=(string)NV12 ! mux.sink_0 nvstreammux live-source=1 name=mux batch-size=1 width=640 height=480 ! nvinfer config-file-path=/opt/nvidia/deepstream/deepstream-4.0/sources/apps/sample_apps/deepstream-test1/dstest1_pgie_config.txt batch-size=1 ! nvmultistreamtiler rows=1 columns=1 width=640 height=480 ! nvvideoconvert ! nvdsosd ! nvegltransform ! nveglglessink", NULL);

  /* Start playing */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg =
      gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Free resources */
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}

But I am trying to make it run using this C application:

#include <gst/gst.h>

typedef struct _CustomData{
  GstElement *pipeline;
  GstElement *source;
  GstElement *filter1;
  GstElement *filter2;
  GstElement *filter3;
  GstElement *filter4;
  GstElement *convert1;
  GstElement *convert2;
  GstElement *mux;
  GstElement *infer;
  GstElement *tiler;
  GstElement *videoconvert1;
  GstElement *videoconvert2;
  GstElement *osd;
  GstElement *transform;
  GstElement *sink;
  GstElement *bin;
}CustomData;

static gboolean
bus_call (GstBus * bus, GstMessage * msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;
  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR:{
      gchar *debug;
      GError *error;
      gst_message_parse_error (msg, &error, &debug);
      g_printerr ("ERROR from element %s: %s\n",
          GST_OBJECT_NAME (msg->src), error->message);
      if (debug)
        g_printerr ("Error details: %s\n", debug);
      g_free (debug);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}



int main(int argc, char *argv[])
{
  GMainLoop *loop = NULL;
  CustomData data;
  GstBus *bus;
  GstCaps *caps1, *caps2, *caps3, *caps4;
  gchar *string1 = "video/x-raw(memory:NVMM),format=(string)NV12";
  guint bus_watch_id;
  gchar *path = "/opt/nvidia/deepstream/deepstream-4.0/sources/apps/sample_apps/deepstream-test1/dstest1_pgie_config.txt";

  gst_init(&argc, &argv);
  loop = g_main_loop_new(NULL,FALSE);


  //create gstreamer elements
  data.pipeline = gst_pipeline_new("test_pipeline");

  data.source = gst_element_factory_make("v4l2src", "source");
  data.filter1 = gst_element_factory_make("capsfilter", "filter1");
  data.filter2 = gst_element_factory_make("capsfilter", "filter2");
  data.filter3 = gst_element_factory_make("capsfilter", "filter3");
  data.filter4 = gst_element_factory_make("capsfilter", "filter4");
  data.convert1 = gst_element_factory_make("nvvidconv","convert1");
  data.convert2 = gst_element_factory_make("nvvidconv", "convert2");
  data.videoconvert1 = gst_element_factory_make("nvvideoconvert", "videoconvert");

  gst_bin_add_many(GST_BIN(data.bin), data.source, data.filter1, data.convert1, data.filter2, data.convert2, data.filter3, data.videoconvert1, data.filter4,NULL );
  gst_element_link_many(data.source,data.filter1, data.convert1,data.filter2,data.convert2, data.filter3,data.videoconvert1, data.filter4, NULL);

  caps1 = gst_caps_new_simple("video/x-raw","format", G_TYPE_STRING,"YUY2", NULL );
  caps2 = gst_caps_from_string(string1);
  caps3 = gst_caps_new_simple("video/x-raw","format", G_TYPE_STRING, "NV12",NULL);
  caps4 = gst_caps_from_string(string1);

  g_object_set(G_OBJECT(data.source), "device", "/dev/video0",NULL);
  g_object_set(G_OBJECT(data.filter1), "caps", caps1, NULL);
  g_object_set(G_OBJECT(data.filter2), "caps", caps2, NULL);
  g_object_set(G_OBJECT(data.filter3), "caps", caps3, NULL);
  g_object_set(G_OBJECT(data.filter4), "caps", caps4, NULL);

  data.mux = gst_element_factory_make("nvstreammux", "stream-muxer");

  gst_bin_add(GST_BIN(data.pipeline), data.mux);

  gst_bin_add(GST_BIN(data.pipeline), GST_BIN(data.bin));
  GstPad *sink_pad = gst_element_get_request_pad(data.mux, "sink_0");

  GstPad *src_pad = gst_element_get_static_pad(data.bin, "src");

  gst_pad_link(src_pad, sink_pad);

  gst_object_unref(sink_pad);
  gst_object_unref(src_pad);

  data.infer = gst_element_factory_make("nvinfer", "primary-nvinference-engine");

  data.tiler = gst_element_factory_make("nvmultistreamtiler", "nvtiler");

  data.videoconvert2 = gst_element_factory_make("nvvideoconvert", "video-convert2");

  data.osd = gst_element_factory_make("nvdsosd", "nv-onscreendisplay");
  data.transform = gst_element_factory_make("nvegltransform", "nvegl-transform");
  data.sink = gst_element_factory_make("nveglglessink", "nvvideo-renderer");

  gst_bin_add_many(GST_BIN(data.pipeline), data.mux, data.infer,data.tiler, data.videoconvert2, data.osd, data.transform,data.sink, NULL );
  gst_element_link_many(data.mux, data.infer,data.tiler, data.videoconvert2, data.osd, data.transform,data.sink, NULL);

  g_object_set(G_OBJECT(data.mux), "live-source", 1, "name", "mux", "batch-size", 1, "width", 640, "height", 480, NULL);
  g_object_set(G_OBJECT(data.infer), "config-file-path",path, NULL);
  g_object_set(G_OBJECT(data.infer), "batch-size", 1);
  g_object_set(G_OBJECT(data.tiler), "rows", 1, "columns", 1, "width", 640, "height", 480, NULL);
  gst_element_set_state(data.pipeline,GST_STATE_PLAYING);
  g_print("Running\n");

   /* we add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (data.pipeline));
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  g_main_loop_run(loop);

  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (data.pipeline, GST_STATE_NULL);
  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (data.pipeline));
  g_source_remove (bus_watch_id);
  g_main_loop_unref (loop);
  return 0;


}


and it doesn't show the display anymore. What am I missing? I am also getting these errors:

(five:18933): GLib-GObject-WARNING **: 12:27:23.209: cannot register existing type 'GstInterpolationMethod'

(five:18933): GLib-GObject-CRITICAL **: 12:27:23.209: g_param_spec_enum: assertion 'G_TYPE_IS_ENUM (enum_type)' failed

(five:18933): GLib-GObject-CRITICAL **: 12:27:23.209: validate_pspec_to_install: assertion 'G_IS_PARAM_SPEC (pspec)' failed

(five:18933): GStreamer-CRITICAL **: 12:27:23.209: gst_bin_add_many: assertion 'GST_IS_BIN (bin)' failed

(five:18933): GStreamer-CRITICAL **: 12:27:23.216: gst_bin_add: assertion 'GST_IS_ELEMENT (element)' failed

(five:18933): GStreamer-CRITICAL **: 12:27:23.217: gst_element_get_static_pad: assertion 'GST_IS_ELEMENT (element)' failed

(five:18933): GStreamer-CRITICAL **: 12:27:23.217: gst_pad_link_full: assertion 'GST_IS_PAD (srcpad)' failed

(five:18933): GStreamer-CRITICAL **: 12:27:23.217: gst_object_unref: assertion 'object != NULL' failed

(five:18933): GStreamer-WARNING **: 12:27:23.263: Name 'stream-muxer' is not unique in bin 'test_pipeline', not adding

(five:18933): GLib-GObject-WARNING **: 12:27:23.266: g_object_set_is_valid_property: object class 'GstNvInfer' has no property named '\x90\xa1J\x8fU'

Using winsys: x11 
Creating LL OSD context new
0:00:00.759463907 18933   0x558f53f760 INFO                 nvinfer gstnvinfer.cpp:519:gst_nvinfer_logger:<primary-nvinference-engine> NvDsInferContext[UID 1]:initialize(): Trying to create engine from model files
0:00:27.684391089 18933   0x558f53f760 INFO                 nvinfer gstnvinfer.cpp:519:gst_nvinfer_logger:<primary-nvinference-engine> NvDsInferContext[UID 1]:generateTRTModel(): Storing the serialized cuda engine to file at /opt/nvidia/deepstream/deepstream-4.0/samples/models/Primary_Detector/resnet10.caffemodel_b1_int8.engine
Running
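
The 'GST_IS_BIN (bin)' and 'GST_IS_ELEMENT (element)' assertion failures above are what GLib prints when the object handed to those calls was never actually created, which is consistent with data.bin never being initialized with gst_bin_new() in the code above. For reference, a GstBin has to be created explicitly and has no pads of its own; it only exposes pads through ghost pads. Below is a minimal sketch of that pattern, using illustrative elements (videotestsrc/videoconvert) rather than the ones in the pipeline above:

#include <gst/gst.h>

/* Minimal sketch, not the program above: build a bin, link two illustrative
 * elements inside it, and expose the last src pad as a ghost pad so the bin
 * itself can later be linked to a downstream element such as nvstreammux. */
static GstElement *
make_source_bin (void)
{
  GstElement *bin = gst_bin_new ("source-bin");   /* must be created first */
  GstElement *src = gst_element_factory_make ("videotestsrc", "src");
  GstElement *conv = gst_element_factory_make ("videoconvert", "conv");

  gst_bin_add_many (GST_BIN (bin), src, conv, NULL);
  gst_element_link (src, conv);

  /* A bin has no pads of its own; forward the inner src pad as a ghost pad. */
  GstPad *inner_src = gst_element_get_static_pad (conv, "src");
  gst_element_add_pad (bin, gst_ghost_pad_new ("src", inner_src));
  gst_object_unref (inner_src);

  return bin;
}

A bin built this way can then be added to the pipeline and its ghost "src" pad linked to a requested nvstreammux sink pad.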

EDIT:

I think it has something to do with nvvidconv or nvvideoconvert not linking with nvstreammux properly. I was testing with this C application:

#include <gst/gst.h>

int main(int argc, char* argv[])
{
  GstElement *pipeline, *source, *filter1, *filter2, *convert1, *convert2, *filter3, *convert3, *filter4, *convert4, *sink, *mux, *infer;
  GstCaps *caps1, *caps2,*caps3, *caps4;
  gchar* string1 = "video/x-raw(memory:NVMM),format=(string)NV12";
   GstBus *bus;
  GstMessage *msg;
  GstStateChangeReturn ret;

  gst_init(&argc, &argv);

  pipeline = gst_pipeline_new("test_pipeline");

  source = gst_element_factory_make("v4l2src", "source");
  filter1 = gst_element_factory_make("capsfilter", "filter1");
  filter2 = gst_element_factory_make("capsfilter", "filter2");
  filter3 = gst_element_factory_make("capsfilter", "filter3");
  filter4 = gst_element_factory_make("capsfilter", "filter4");
  convert3 = gst_element_factory_make("nvvidconv","convert3");
  convert1 = gst_element_factory_make("nvvidconv", "convert1");
  convert2 = gst_element_factory_make("nvvidconv", "convert2");
  convert4 = gst_element_factory_make("nvvideoconvert", "convert4");
  infer = gst_element_factory_make("nvinfer", "inference");
  sink = gst_element_factory_make("autovideosink", "sink");
  mux = gst_element_factory_make("nvstreammux", "nv-stream-muxer");
  caps1 = gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, "YUY2",NULL);
  caps2 = gst_caps_from_string(string1);
  caps3 = gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, "NV12", NULL);
  caps4 = gst_caps_from_string(string1);
  g_object_set(G_OBJECT(filter1), "caps", caps1,NULL);
  g_object_set(G_OBJECT(filter2), "caps", caps2, NULL);
  g_object_set(G_OBJECT(filter3), "caps", caps3, NULL);
  g_object_set(G_OBJECT(filter4), "caps", caps4, NULL);
  g_object_set(G_OBJECT(mux), "batch-size", 1, "live-source", 1, "width", 640, "height", 480, NULL);
  g_object_set(G_OBJECT(infer),"config-file-path", "/opt/nvidia/deepstream/deepstream-4.0/sources/apps/sample_apps/deepstream-test1/dstest1_pgie_config.txt",NULL );
  gchar pad_name_sink[16] = "sink_0";
  gchar pad_name_src[16] = "src";
  GstPad *src_pad = gst_element_get_static_pad(convert4, pad_name_src);
  GstPad *sink_pad = gst_element_get_request_pad(mux, pad_name_sink);
  gst_pad_link(src_pad, sink_pad);

  gst_object_unref(sink_pad);
  gst_object_unref(src_pad);

  gst_bin_add_many(GST_BIN(pipeline), source, filter1, convert1, filter2, convert2, filter3, convert3, filter4, convert4, mux, infer, NULL);
  gst_element_link_many(source, filter1, convert1, filter2, convert2, filter3, convert3, filter4, convert4, NULL);
  gst_element_link_many(mux, infer, NULL);
  /* Start playing */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Parse message */
  if (msg != NULL) {
    GError *err;
    gchar *debug_info;

    switch (GST_MESSAGE_TYPE (msg)) {
      case GST_MESSAGE_ERROR:
        gst_message_parse_error (msg, &err, &debug_info);
        g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
        g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
        g_clear_error (&err);
        g_free (debug_info);
        break;
      case GST_MESSAGE_EOS:
        g_print ("End-Of-Stream reached.\n");
        break;
      default:
        /* We should not reach here because we only asked for ERRORs and EOS */
        g_printerr ("Unexpected message received.\n");
        break;
    }
    gst_message_unref (msg);
  }

  /* Free resources */
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;


}

and I get this output:

(wtf:17982): GLib-GObject-WARNING **: 15:37:43.936: cannot register existing type 'GstInterpolationMethod'

(wtf:17982): GLib-GObject-CRITICAL **: 15:37:43.936: g_param_spec_enum: assertion 'G_TYPE_IS_ENUM (enum_type)' failed

(wtf:17982): GLib-GObject-CRITICAL **: 15:37:43.937: validate_pspec_to_install: assertion 'G_IS_PARAM_SPEC (pspec)' failed
0:00:00.753282361 17982   0x55ae4f0d60 INFO                 nvinfer gstnvinfer.cpp:519:gst_nvinfer_logger:<inference> NvDsInferContext[UID 1]:initialize(): Trying to create engine from model files
0:00:26.279690176 17982   0x55ae4f0d60 INFO                 nvinfer gstnvinfer.cpp:519:gst_nvinfer_logger:<inference> NvDsInferContext[UID 1]:generateTRTModel(): Storing the serialized cuda engine to file at /opt/nvidia/deepstream/deepstream-4.0/samples/models/Primary_Detector/resnet10.caffemodel_b1_int8.engine
0:00:26.388676632 17982   0x55ae4f1b20 WARN          v4l2bufferpool gstv4l2bufferpool.c:790:gst_v4l2_buffer_pool_start:<source:pool:src> Uncertain or not enough buffers, enabling copy threshold
0:00:28.620696872 17982   0x55ae4f1b20 WARN                 basesrc gstbasesrc.c:3055:gst_base_src_loop:<source> error: Internal data stream error.
0:00:28.620764652 17982   0x55ae4f1b20 WARN                 basesrc gstbasesrc.c:3055:gst_base_src_loop:<source> error: streaming stopped, reason not-linked (-1)
Error received from element source: Internal data stream error.
Debugging information: gstbasesrc.c(3055): gst_base_src_loop (): /GstPipeline:test_pipeline/GstV4l2Src:source:
streaming stopped, reason not-linked (-1)

I don't think the pads are linking. I checked their pad templates, and both nvvidconv and nvvideoconvert can produce NV12 on their src pads, while nvstreammux can accept NV12 on its sink pads. I am still debugging. Any help would be appreciated. Thanks!
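
One way to confirm whether the links are being made is to check the return values GStreamer already provides: gst_element_link_many() returns a gboolean and gst_pad_link() returns a GstPadLinkReturn (for example, GST_PAD_LINK_WRONG_HIERARCHY is returned when the two elements are not in the same bin/pipeline at the time of linking). A minimal sketch of that check, with placeholder element variables rather than the ones above:

#include <gst/gst.h>

/* Sketch only: verify a manual pad link instead of ignoring its result.
 * "upstream" and "mux" stand in for whatever two elements are being linked. */
static gboolean
link_to_streammux (GstElement * upstream, GstElement * mux)
{
  GstPad *src_pad = gst_element_get_static_pad (upstream, "src");
  GstPad *sink_pad = gst_element_get_request_pad (mux, "sink_0");
  GstPadLinkReturn lret = gst_pad_link (src_pad, sink_pad);

  if (lret != GST_PAD_LINK_OK)
    g_printerr ("pad link failed (GstPadLinkReturn %d)\n", lret);

  gst_object_unref (src_pad);
  gst_object_unref (sink_pad);
  return lret == GST_PAD_LINK_OK;
}

Running the application with GST_DEBUG=3 also makes GStreamer log which pads refuse to link and why.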


Answer 1:


Sorry, it was my fault. I was able to figure out my problem: it was in the construction and linking of my pipeline. I fixed it, and it works well now! Thank you all for the support!

#include <gst/gst.h>

int main(int argc, char *argv[])
{ 
  GstElement *bin, *pipeline, *source, *convert1,*filter1,*convert2,*filter2,*convert3,*filter3,*convert4,*filter4, *muxsink,*infer,*tiler,*osd,*transform,*sink;
  GstBus *bus;
  GstCaps *caps1,*caps2,*caps3,*caps4;
  GstMessage *msg;
  GstStateChangeReturn ret;
  gchar *string1 = "video/x-raw(memory:NVMM),format=(string)NV12";
  gst_init(&argc, &argv);

  pipeline = gst_pipeline_new("wtf-pipeline");
  bin = gst_bin_new("wtf-bin");
  source = gst_element_factory_make("v4l2src", "source");
  filter1 = gst_element_factory_make("capsfilter", "filter");
  convert1 = gst_element_factory_make("nvvidconv", "convert");
  filter2 = gst_element_factory_make("capsfilter", "filter2");
  filter3 = gst_element_factory_make("capsfilter", "filter3");
  filter4 = gst_element_factory_make("capsfilter", "filter4");
  convert3 = gst_element_factory_make("nvvideoconvert", "convert3");
  convert2 = gst_element_factory_make("nvvidconv", "convert2");
  convert4 = gst_element_factory_make("nvvideoconvert", "convert4");
  infer = gst_element_factory_make("nvinfer", "inference");
  tiler = gst_element_factory_make("nvmultistreamtiler", "tiler");
  osd = gst_element_factory_make("nvdsosd", "osd");
  transform = gst_element_factory_make("nvegltransform", "transform");
  sink = gst_element_factory_make("nveglglessink", "sink");

  muxsink = gst_element_factory_make("nvstreammux", "muxsink");

  gst_bin_add_many(GST_BIN(pipeline), source,filter1,convert1,filter2,convert2,filter3,convert4,filter4, muxsink,infer,tiler,convert3,osd,transform,sink, NULL);
  //  gst_bin_add(GST_BIN(pipeline), bin);

  // source(v4l2src) -> filter1(YUY2)->convert1(nvvidconv)->filter2(memoryNV12)->convert2(nvvidconv)
  //-->filter3(NV12string)-->convert4(nvvideoconvert)->filter4(memoryNV12)->mux(sink)->infer(nvinfer)
  //--> tiler(nvmultistreamtiler)->convert3(nvvideoconvert)->osd(nvdsosd)->transform(nvegltransform)->sink(nveglglessink)
  gst_element_link_many(source,filter1,convert1,filter2, convert2,filter3,convert4,filter4,NULL);

  caps1 = gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING,"YUY2", NULL);
  caps2 = gst_caps_from_string(string1);
  caps3 = gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, "NV12", NULL);
  caps4 = gst_caps_from_string(string1);

  g_object_set(G_OBJECT(filter1), "caps", caps1, NULL);
  g_object_set(G_OBJECT(filter2), "caps", caps2, NULL);
  g_object_set(G_OBJECT(filter3), "caps", caps3, NULL);
  g_object_set(G_OBJECT(filter4), "caps", caps4, NULL);
  //  g_object_set(G_OBJECT(sink), "batch-size", 1, NULL);
  //  g_object_set(G_OBJECT(sink),"width",640,"height",480, "batch-size", 1, "live-source", 1, NULL );
  GstPad *source_pad = gst_element_get_static_pad(filter4, "src");
  guint i=0;
  gchar pad_name[16]= {};
  g_snprintf(pad_name, 15, "sink_%u",i);
  GstPad *sink_pad = gst_element_get_request_pad(muxsink,pad_name);
  g_object_set(G_OBJECT(muxsink),"width",640,"height",480, "batch-size", 1, "live-source", 1, NULL );
  gst_pad_link(source_pad, sink_pad);

  g_object_set(G_OBJECT(infer), "config-file-path", "/opt/nvidia/deepstream/deepstream-4.0/sources/apps/sample_apps/deepstream-test1/dstest1_pgie_config.txt", "batch-size", 1, NULL);
  g_object_set(G_OBJECT(tiler), "rows", 1, "columns", 1, "width", 640, "height", 480, NULL);
  gst_element_link_many(muxsink,infer,tiler,convert3,osd,transform,sink,NULL);
  gst_element_set_state(pipeline, GST_STATE_PLAYING);
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Parse message */
  if (msg != NULL) {
    GError *err;
    gchar *debug_info;

    switch (GST_MESSAGE_TYPE (msg)) {
      case GST_MESSAGE_ERROR:
        gst_message_parse_error (msg, &err, &debug_info);
        g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
        g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
        g_clear_error (&err);
        g_free (debug_info);
        break;
      case GST_MESSAGE_EOS:
        g_print ("End-Of-Stream reached.\n");
        break;
      default:
        /* We should not reach here because we only asked for ERRORs and EOS */
        g_printerr ("Unexpected message received.\n");
        break;
    }
    gst_message_unref (msg);
  }

  /* Free resources */
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}




Source: https://stackoverflow.com/questions/59429894/realtime-v4l2src-for-deepstream-test1-c-application-does-not-work
