gstreamer appsrc test application

Submitted by 我与影子孤独终老i on 2019-11-28 02:01:43

Question


I am trying to learn the GStreamer appsrc plugin by playing AV from a transport stream demultiplexer that I wrote (I know such plugins already exist; I wanted to do it myself to learn). I have extracted the audio and video elementary streams from the MPEG transport stream; now I have to push them into the appsrc plugin and play them with a GStreamer pipeline (this part is not yet clear to me, namely which plugins to use - any tips will be highly appreciated).
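
To make the goal concrete, here is roughly the shape I have in mind - only a sketch: the parser/decoder element names are assumptions for the GStreamer 0.10 plugin set and depend on the codecs the transport stream actually carries (an H.264 video stream is assumed here, and chunk_data/chunk_size stand for one demuxed payload):

/* Sketch only: feed a demuxed H.264 elementary stream into appsrc (GStreamer 0.10 API).
 * h264parse/ffdec_h264 are assumed element names; use whatever matches your stream. */
GstElement *pipeline = gst_parse_launch(
    "appsrc name=mysource ! h264parse ! ffdec_h264 ! "
    "ffmpegcolorspace ! xvimagesink", NULL);
GstElement *appsrc = gst_bin_get_by_name(GST_BIN(pipeline), "mysource");

/* telling appsrc what it will produce may be needed for caps negotiation */
GstCaps *escaps = gst_caps_new_simple("video/x-h264", NULL);
gst_app_src_set_caps(GST_APP_SRC(appsrc), escaps);
gst_caps_unref(escaps);

/* then, for every chunk coming out of the demultiplexer: */
GstBuffer *buf = gst_buffer_new_and_alloc(chunk_size);
memcpy(GST_BUFFER_DATA(buf), chunk_data, chunk_size);
gst_app_src_push_buffer(GST_APP_SRC(appsrc), buf);   /* takes ownership of buf */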

I found some sample code using appsrc, but when I run it there is no output. I verified that the start_feed and read_data functions are indeed invoked. In the read_data function there is a timer check that decides whether to push data or not:

ms = g_timer_elapsed(app->timer, NULL);
printf("ms %f\n", ms);
if (ms > 1.0/20.0) {

I don't know what this timer is for, but the value of ms is always around 0.035, so data is never pushed into the appsrc. I changed the if condition to if (ms > 0.03), but then I got an internal data flow error:

ms 0.033747
ms 0.000010
ERROR from element mysource: Internal data flow error.
Debugging info: gstbasesrc.c(2582): gst_base_src_loop (): /GstPipeline:pipeline0/GstAppSrc:mysource:
streaming task paused, reason not-negotiated (-4)

Can someone tell me how to tweak this to see the video? The code from the linked website is given below.

#include <gst/gst.h>
#include <gst/app/gstappsrc.h>

#include <stdio.h>
#include <string.h>
#include <stdlib.h>

#include <gdk-pixbuf/gdk-pixbuf.h>

GST_DEBUG_CATEGORY (appsrc_pipeline_debug);
#define GST_CAT_DEFAULT appsrc_pipeline_debug

typedef struct _App App;

struct _App
{
  GstElement *pipeline;
  GstElement *appsrc;

  GMainLoop *loop;
  guint sourceid;

  GTimer *timer;

};

App s_app;

static gboolean
read_data (App * app)
{
    guint len;
    GstFlowReturn ret;
    gdouble ms;

    ms = g_timer_elapsed(app->timer, NULL);
    printf("ms %f\n", ms);
    if (ms > 1.0/20.0) {
        GstBuffer *buffer;
        GdkPixbuf *pb;
        gboolean ok = TRUE;

        buffer = gst_buffer_new();

        pb = gdk_pixbuf_new(GDK_COLORSPACE_RGB, FALSE, 8, 640, 480);
        gdk_pixbuf_fill(pb, 0xffffffff);

        GST_BUFFER_DATA (buffer) = gdk_pixbuf_get_pixels(pb);
        GST_BUFFER_SIZE (buffer) = 640*480*3*sizeof(guchar);

        GST_DEBUG ("feed buffer");
        g_signal_emit_by_name (app->appsrc, "push-buffer", buffer, &ret);
        gst_buffer_unref (buffer);

        if (ret != GST_FLOW_OK) {
            /* some error, stop sending data */
            GST_DEBUG ("some error");
            ok = FALSE;
        }

        g_timer_start(app->timer);

        return ok;
    }

    // g_signal_emit_by_name (app->appsrc, "end-of-stream", &ret);
    return FALSE;
}

/* This signal callback is called when appsrc needs data; we add an idle handler
 * to the mainloop to start pushing data into the appsrc. */
static void
start_feed (GstElement * pipeline, guint size, App * app)
{
  if (app->sourceid == 0) {
    GST_DEBUG ("start feeding");
    app->sourceid = g_idle_add ((GSourceFunc) read_data, app);
  }
}

/* This callback is called when appsrc has enough data and we can stop sending.
* We remove the idle handler from the mainloop */
static void
stop_feed (GstElement * pipeline, App * app)
{
  if (app->sourceid != 0) {
    GST_DEBUG ("stop feeding");
    g_source_remove (app->sourceid);
    app->sourceid = 0;
  }
}

static gboolean
bus_message (GstBus * bus, GstMessage * message, App * app)
{
  GST_DEBUG ("got message %s",
      gst_message_type_get_name (GST_MESSAGE_TYPE (message)));

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ERROR: {
        GError *err = NULL;
        gchar *dbg_info = NULL;

        gst_message_parse_error (message, &err, &dbg_info);
        g_printerr ("ERROR from element %s: %s\n",
            GST_OBJECT_NAME (message->src), err->message);
        g_printerr ("Debugging info: %s\n", (dbg_info) ? dbg_info : "none");
        g_error_free (err);
        g_free (dbg_info);
        g_main_loop_quit (app->loop);
        break;
    }
    case GST_MESSAGE_EOS:
      g_main_loop_quit (app->loop);
      break;
    default:
      break;
  }
  return TRUE;
}

int
main (int argc, char *argv[])
{
  App *app = &s_app;
  GError *error = NULL;
  GstBus *bus;
  GstCaps *caps;

  gst_init (&argc, &argv);

  GST_DEBUG_CATEGORY_INIT (appsrc_pipeline_debug, "appsrc-pipeline", 0,
      "appsrc pipeline example");

  /* create a mainloop to get messages and to handle the idle handler that will
* feed data to appsrc. */
  app->loop = g_main_loop_new (NULL, TRUE);
  app->timer = g_timer_new();

  app->pipeline = gst_parse_launch("appsrc name=mysource ! video/x-raw-rgb,width=640,height=480,bpp=24,depth=24 ! ffmpegcolorspace ! videoscale method=1 ! theoraenc bitrate=150 ! udpsink host=127.0.0.1 port=1234", NULL);
  g_assert (app->pipeline);

  bus = gst_pipeline_get_bus (GST_PIPELINE (app->pipeline));
  g_assert(bus);

  /* add watch for messages */
  gst_bus_add_watch (bus, (GstBusFunc) bus_message, app);

  /* get the appsrc */
  app->appsrc = gst_bin_get_by_name (GST_BIN(app->pipeline), "mysource");
  g_assert(app->appsrc);
  g_assert(GST_IS_APP_SRC(app->appsrc));
  g_signal_connect (app->appsrc, "need-data", G_CALLBACK (start_feed), app);
  g_signal_connect (app->appsrc, "enough-data", G_CALLBACK (stop_feed), app);

  /* set the caps on the source */
  caps = gst_caps_new_simple ("video/x-raw-rgb",
    "bpp",G_TYPE_INT,24,
    "depth",G_TYPE_INT,24,
     "width", G_TYPE_INT, 640,
     "height", G_TYPE_INT, 480,
     NULL);
   gst_app_src_set_caps(GST_APP_SRC(app->appsrc), caps);


  /* go to playing and wait in a mainloop. */
  gst_element_set_state (app->pipeline, GST_STATE_PLAYING);

  /* this mainloop is stopped when we receive an error or EOS */
  g_main_loop_run (app->loop);

  GST_DEBUG ("stopping");

  gst_element_set_state (app->pipeline, GST_STATE_NULL);

  gst_object_unref (bus);
  g_main_loop_unref (app->loop);

  return 0;
}

Update: I tried replacing udpsink with a plain xvimagesink and it didn't help.


Answer 1:


Since no one has bothered to answer, I will post working test code for the appsrc plugin. Although this doesn't explain why the code in the question fails, it should help people who land here from Google searching for appsrc test code.

#include <stdio.h>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>

typedef struct {
    GstPipeline *pipeline;
    GstAppSrc *src;
    GstElement *sink;
    GstElement *decoder;
    GstElement *ffmpeg;
    GstElement *xvimagesink;
    GMainLoop *loop;
    guint sourceid;
    FILE *file;
}gst_app_t;

static gst_app_t gst_app;

#define BUFF_SIZE (1024)

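/* Idle handler installed from the "need-data" callback below: read one chunk
 * from the input file and push it into appsrc; returns FALSE (removing itself
 * from the main loop) on EOF, short read or push error. */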
static gboolean read_data(gst_app_t *app)
{
    GstBuffer *buffer;
    guint8 *ptr;
    gint size;
    GstFlowReturn ret;

    ptr = g_malloc(BUFF_SIZE);
    g_assert(ptr);

    size = fread(ptr, 1, BUFF_SIZE, app->file);

    if(size == 0){
        ret = gst_app_src_end_of_stream(app->src);
        g_debug("eos returned %d at %d\n", ret, __LINE__);
        return FALSE;
    }

    buffer = gst_buffer_new();
    GST_BUFFER_MALLOCDATA(buffer) = ptr;
    GST_BUFFER_SIZE(buffer) = size;
    GST_BUFFER_DATA(buffer) = GST_BUFFER_MALLOCDATA(buffer);

    ret = gst_app_src_push_buffer(app->src, buffer);

    if(ret !=  GST_FLOW_OK){
        g_debug("push buffer returned %d for %d bytes \n", ret, size);
        return FALSE;
    }

    if(size != BUFF_SIZE){
        ret = gst_app_src_end_of_stream(app->src);
        g_debug("eos returned %d at %d\n", ret, __LINE__);
        return FALSE;
    }

    return TRUE;
}

static void start_feed (GstElement * pipeline, guint size, gst_app_t *app)
{
    if (app->sourceid == 0) {
        GST_DEBUG ("start feeding");
        app->sourceid = g_idle_add ((GSourceFunc) read_data, app);
    }
}

static void stop_feed (GstElement * pipeline, gst_app_t *app)
{
    if (app->sourceid != 0) {
        GST_DEBUG ("stop feeding");
        g_source_remove (app->sourceid);
        app->sourceid = 0;
    }
}

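/* decodebin creates its source pads dynamically; when a video pad shows up,
 * link it to the ffmpegcolorspace sink pad so the branch to xvimagesink is
 * completed at runtime. */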
static void on_pad_added(GstElement *element, GstPad *pad)
{
    GstCaps *caps;
    GstStructure *str;
    gchar *name;
    GstPad *ffmpegsink;
    GstPadLinkReturn ret;

    g_debug("pad added");

    caps = gst_pad_get_caps(pad);
    str = gst_caps_get_structure(caps, 0);

    g_assert(str);

    name = (gchar*)gst_structure_get_name(str);

    g_debug("pad name %s", name);

    if(g_strrstr(name, "video")){

        ffmpegsink = gst_element_get_pad(gst_app.ffmpeg, "sink");
        g_assert(ffmpegsink);
        ret = gst_pad_link(pad, ffmpegsink);
        g_debug("pad_link returned %d\n", ret);
        gst_object_unref(ffmpegsink);
    }
    gst_caps_unref(caps);
}

static gboolean bus_callback(GstBus *bus, GstMessage *message, gpointer *ptr)
{
    gst_app_t *app = (gst_app_t*)ptr;

    switch(GST_MESSAGE_TYPE(message)){

    case GST_MESSAGE_ERROR:{
        gchar *debug;
        GError *err;

        gst_message_parse_error(message, &err, &debug);
        g_print("Error %s\n", err->message);
        g_error_free(err);
        g_free(debug);
        g_main_loop_quit(app->loop);
    }
    break;

    case GST_MESSAGE_EOS:
        g_print("End of stream\n");
        g_main_loop_quit(app->loop);
        break;

    default:
        g_print("got message %s\n", \
            gst_message_type_get_name (GST_MESSAGE_TYPE (message)));
        break;
    }

    return TRUE;
}

int main(int argc, char *argv[])
{
    gst_app_t *app = &gst_app;
    GstBus *bus;
    GstStateChangeReturn state_ret;

    if(argc != 2){
        printf("File name not specified\n");
        return 1;
    }

    app->file = fopen(argv[1], "r");

    g_assert(app->file);

    gst_init(NULL, NULL);

    app->pipeline = (GstPipeline*)gst_pipeline_new("mypipeline");
    bus = gst_pipeline_get_bus(app->pipeline);
    gst_bus_add_watch(bus, (GstBusFunc)bus_callback, app);
    gst_object_unref(bus);

    app->src = (GstAppSrc*)gst_element_factory_make("appsrc", "mysrc");
    app->decoder = gst_element_factory_make("decodebin", "mydecoder");
    app->ffmpeg = gst_element_factory_make("ffmpegcolorspace", "myffmpeg");
    app->xvimagesink = gst_element_factory_make("xvimagesink", "myvsink");

    g_assert(app->src);
    g_assert(app->decoder);
    g_assert(app->ffmpeg);
    g_assert(app->xvimagesink);

    g_signal_connect(app->src, "need-data", G_CALLBACK(start_feed), app);
    g_signal_connect(app->src, "enough-data", G_CALLBACK(stop_feed), app);
    g_signal_connect(app->decoder, "pad-added", G_CALLBACK(on_pad_added), app->decoder);

    gst_bin_add_many(GST_BIN(app->pipeline), (GstElement*)app->src, app->decoder, app->ffmpeg, app->xvimagesink, NULL);

    if(!gst_element_link((GstElement*)app->src, app->decoder)){
        g_warning("failed to link src anbd decoder");
    }

    if(!gst_element_link(app->ffmpeg, app->xvimagesink)){
        g_warning("failed to link ffmpeg and xvsink");
    }

    state_ret = gst_element_set_state((GstElement*)app->pipeline, GST_STATE_PLAYING);
    g_warning("set state returned %d\n", state_ret);

    app->loop = g_main_loop_new(NULL, FALSE);
    printf("Running main loop\n");
    g_main_loop_run(app->loop);

    state_ret = gst_element_set_state((GstElement*)app->pipeline, GST_STATE_NULL);
    g_warning("set state null returned %d\n", state_ret);

    return 0;
}
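
To try it out, something along the lines of gcc test.c -o test $(pkg-config --cflags --libs gstreamer-0.10 gstreamer-app-0.10) should build it (the pkg-config package names are an assumption for a GStreamer 0.10 install and may differ on your distribution); run it as ./test <media file>, and decodebin will typefind whatever the file contains.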



Answer 2:


With much thanks to the gurus on IRC/freenode/#gstreamer (especially __tim!), I've figured out how to fix the bugs in your sample code.

Bug 1. The last line of read_data() should "return TRUE" so that the idle handler stays attached to the main loop and keeps being called. Only return FALSE when you want to stop sending data completely.

Bug 2. Replace gst_caps_new_simple() with gst_video_format_new_caps(). You'll also need to include gst/video/video.h, and link against -lgstvideo-0.10.
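
Bug 1 is just the one-line change at the end of read_data(). The caps half of the fix looks roughly like this (a sketch against the 0.10 API, using the same values as the full listing below; the pkg-config package that provides -lgstvideo-0.10 is gstreamer-video-0.10):

#include <gst/video/video.h>

/* gst_video_format_new_caps() builds a complete video/x-raw-rgb structure
 * (bpp/depth, masks, endianness, framerate, pixel-aspect-ratio); the
 * hand-written gst_caps_new_simple() caps were incomplete, which is what
 * produced the not-negotiated error. */
caps = gst_video_format_new_caps (GST_VIDEO_FORMAT_RGB,
    640, 480,   /* width, height */
    0, 1,       /* framerate n/d */
    4, 3);      /* pixel-aspect-ratio n/d */
gst_app_src_set_caps (GST_APP_SRC (app->appsrc), caps);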

Here's my version of the code, showing both how to display the images and how to stream a Theora-encoded version through UDP. Note the use of videorate in the latter case, since Theora expects a constant-frame-rate source.

#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/video/video.h>

#include <stdio.h>
#include <string.h>
#include <stdlib.h>

#include <gdk-pixbuf/gdk-pixbuf.h>

GST_DEBUG_CATEGORY (appsrc_pipeline_debug);
#define GST_CAT_DEFAULT appsrc_pipeline_debug

typedef struct _App App;

struct _App
{
  GstElement *pipeline;
  GstElement *appsrc;

  GMainLoop *loop;
  guint sourceid;

  GTimer *timer;

};

App s_app;

static gboolean
read_data (App * app)
{
    guint len;
    GstFlowReturn ret;
    gdouble ms;

    ms = g_timer_elapsed(app->timer, NULL);
    if (ms > 1.0/20.0) {
        GstBuffer *buffer;
        GdkPixbuf *pb;
        gboolean ok = TRUE;

        buffer = gst_buffer_new();

        pb = gdk_pixbuf_new(GDK_COLORSPACE_RGB, FALSE, 8, 640, 480);
        gdk_pixbuf_fill(pb, 0xffffffff);

        GST_BUFFER_DATA (buffer) = gdk_pixbuf_get_pixels(pb);
        GST_BUFFER_SIZE (buffer) = 640*480*3*sizeof(guchar);

        GST_DEBUG ("feed buffer");
        g_signal_emit_by_name (app->appsrc, "push-buffer", buffer, &ret);
        gst_buffer_unref (buffer);

        if (ret != GST_FLOW_OK) {
            /* some error, stop sending data */
            GST_DEBUG ("some error");
            ok = FALSE;
        }

        g_timer_start(app->timer);

        return ok;
    }

    // g_signal_emit_by_name (app->appsrc, "end-of-stream", &ret);
    return TRUE;
}

/* This signal callback is called when appsrc needs data; we add an idle handler
 * to the mainloop to start pushing data into the appsrc. */
static void
start_feed (GstElement * pipeline, guint size, App * app)
{
  if (app->sourceid == 0) {
    GST_DEBUG ("start feeding");
    app->sourceid = g_idle_add ((GSourceFunc) read_data, app);
  }
}

/* This callback is called when appsrc has enough data and we can stop sending.
* We remove the idle handler from the mainloop */
static void
stop_feed (GstElement * pipeline, App * app)
{
  if (app->sourceid != 0) {
    GST_DEBUG ("stop feeding");
    g_source_remove (app->sourceid);
    app->sourceid = 0;
  }
}

static gboolean
bus_message (GstBus * bus, GstMessage * message, App * app)
{
  GST_DEBUG ("got message %s",
      gst_message_type_get_name (GST_MESSAGE_TYPE (message)));

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ERROR: {
        GError *err = NULL;
        gchar *dbg_info = NULL;

        gst_message_parse_error (message, &err, &dbg_info);
        g_printerr ("ERROR from element %s: %s\n",
            GST_OBJECT_NAME (message->src), err->message);
        g_printerr ("Debugging info: %s\n", (dbg_info) ? dbg_info : "none");
        g_error_free (err);
        g_free (dbg_info);
        g_main_loop_quit (app->loop);
        break;
    }
    case GST_MESSAGE_EOS:
      g_main_loop_quit (app->loop);
      break;
    default:
      break;
  }
  return TRUE;
}

int
main (int argc, char *argv[])
{
  App *app = &s_app;
  GError *error = NULL;
  GstBus *bus;
  GstCaps *caps;

  gst_init (&argc, &argv);

  GST_DEBUG_CATEGORY_INIT (appsrc_pipeline_debug, "appsrc-pipeline", 0,
      "appsrc pipeline example");

  /* create a mainloop to get messages and to handle the idle handler that will
* feed data to appsrc. */
  app->loop = g_main_loop_new (NULL, TRUE);
  app->timer = g_timer_new();

  // Option 1: Display on screen via xvimagesink
  app->pipeline = gst_parse_launch("appsrc name=mysource ! video/x-raw-rgb,width=640,height=480 ! ffmpegcolorspace ! videoscale method=1 ! xvimagesink", NULL);

  // Option 2: Encode using Theora and stream through UDP
  // NOTE: first launch receiver by executing:
  //       gst-launch udpsrc port=5000 ! theoradec ! ffmpegcolorspace ! xvimagesink
  //app->pipeline = gst_parse_launch("appsrc name=mysource ! videorate ! ffmpegcolorspace ! videoscale method=1 ! video/x-raw-yuv,width=640,height=480,framerate=(fraction)15/1 ! theoraenc bitrate=700 ! udpsink host=127.0.0.1 port=5000", NULL);

  g_assert (app->pipeline);

  bus = gst_pipeline_get_bus (GST_PIPELINE (app->pipeline));
  g_assert(bus);

  /* add watch for messages */
  gst_bus_add_watch (bus, (GstBusFunc) bus_message, app);

  /* get the appsrc */
  app->appsrc = gst_bin_get_by_name (GST_BIN(app->pipeline), "mysource");
  g_assert(app->appsrc);
  g_assert(GST_IS_APP_SRC(app->appsrc));
  g_signal_connect (app->appsrc, "need-data", G_CALLBACK (start_feed), app);
  g_signal_connect (app->appsrc, "enough-data", G_CALLBACK (stop_feed), app);

  /* set the caps on the source */
  caps = gst_video_format_new_caps(GST_VIDEO_FORMAT_RGB, 640, 480, 0, 1, 4, 3);
  gst_app_src_set_caps(GST_APP_SRC(app->appsrc), caps);


  /* go to playing and wait in a mainloop. */
  gst_element_set_state (app->pipeline, GST_STATE_PLAYING);

  /* this mainloop is stopped when we receive an error or EOS */
  g_main_loop_run (app->loop);

  GST_DEBUG ("stopping");

  gst_element_set_state (app->pipeline, GST_STATE_NULL);

  gst_object_unref (bus);
  g_main_loop_unref (app->loop);

  return 0;
}
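
For the UDP variant (Option 2), start the receiver first with the gst-launch command shown in the comment, then run this program; with Option 1 the generated white frames simply appear in an xvimagesink window. Building this version needs, in addition to the packages mentioned above, the gstreamer-video-0.10 and gdk-pixbuf-2.0 pkg-config packages (again, package names are an assumption and may vary by distribution).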


Source: https://stackoverflow.com/questions/8746719/gstreamer-appsrc-test-application
