LINUX.ORG.RU

bus call в Gstreamer

 


0

2

Всем привет!

Необходимо проигрывать видео, которое получаю по udp. Когда на порту нет данных (например, сервер недоступен), необходимо показать заставку.

Server:

gst-launch-1.0 -v filesrc location=/video/0001.mp4 ! decodebin ! vpuenc_h264 bitrate=8192 ! rtph264pay ! udpsink host=192.168.5.255 port=5555

Client:

#define GST_PLAYER_UDP "udpsrc port=5555 caps=\"application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96\"\
    ! rtph264depay ! decodebin ! videoconvert ! autovideosink"
....

/* Bus watch callback: print each message's type and the name of the
 * element that posted it.  Always returns TRUE so the watch stays
 * installed for subsequent messages. */
static gboolean bus_call (GstBus     *bus, GstMessage *msg, gpointer    data)
{
  const gchar *type_name = gst_message_type_get_name (GST_MESSAGE_TYPE (msg));
  const gchar *src_name  = GST_MESSAGE_SRC_NAME (msg);

  printf("msg=%s (from %s)\n", type_name, src_name);

  return TRUE;
}
...
/* Build the UDP playback pipeline, start it, and install a bus watch.
 * Returns the PLAYING pipeline, or NULL if the launch string failed to
 * parse.  The caller owns the returned pipeline reference. */
GstElement* gstLaunch()
{
  GstElement *pipeline;
  GError *error = NULL;

  pipeline = gst_parse_launch (GST_PLAYER_UDP, &error);
  if (!pipeline){
    g_print ("\tParse error: %s\n", error ? error->message : "unknown");
    g_clear_error (&error);   /* was leaked before */
    return NULL;              /* was: return 0 for a pointer */
  }
  /* gst_parse_launch() may return a usable pipeline AND set a
   * recoverable error (e.g. an ignored property); release it. */
  g_clear_error (&error);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* The watch id returned by gst_bus_add_watch() was stored but never
   * used; it is only needed for g_source_remove() on teardown. */
  GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_call, NULL);
  gst_object_unref (bus);

  return pipeline;
}

Когда вещание начинается, все отлично и я могу увидеть событие начала потока:

display(/dev/fb0) resolution is (1280x800).
====== OVERLAYSINK: 4.0.9 build on May 12 2017 10:36:47. ======
display(/dev/fb0) resolution is (1280x800).
display(/dev/fb0) resolution is (1280x800).
msg=state-changed (from autovideosink0)
msg=state-changed (from videoconvert0)
msg=state-changed (from typefind)
msg=state-changed (from decodebin0)
msg=state-changed (from rtph264depay0)
msg=state-changed (from udpsrc0)
msg=state-changed (from pipeline0)
msg=state-changed (from videoconvert0)
msg=state-changed (from typefind)
msg=state-changed (from rtph264depay0)
msg=stream-status (from src)
msg=state-changed (from udpsrc0)
msg=state-changed (from pipeline0)
msg=stream-status (from src)
msg=new-clock (from pipeline0)
msg=state-changed (from videoconvert0)
msg=state-changed (from rtph264depay0)
msg=state-changed (from udpsrc0)
msg=state-changed (from h264parse0)
msg=state-changed (from h264parse0)
[INFO]  Product Info: i.MX6Q/D/S
msg=state-changed (from vpudec0)
[INFO]  Product Info: i.MX6Q/D/S
====== VPUDEC: 4.0.9 build on May 12 2017 10:36:53. ======
  wrapper: 1.0.65 (VPUWRAPPER_ARM_LINUX Build on May 12 2017 10:30:05)
  vpulib: 5.4.33
  firmware: 3.1.1.46072
msg=state-changed (from vpudec0)
[INFO]  bitstreamMode 1, chromaInterleave 1, mapType 0, tiled2LinearEnable 0
msg=state-changed (from decodebin0)
msg=stream-start (from pipeline0)<-------------------------start
msg=state-changed (from autovideosink0-actual-sink-overlay)
msg=state-changed (from autovideosink0)
msg=async-done (from pipeline0)
msg=state-changed (from autovideosink0-actual-sink-overlay)
msg=state-changed (from autovideosink0)
msg=state-changed (from vpudec0)
msg=state-changed (from capsfilter0)
msg=state-changed (from h264parse0)
msg=state-changed (from typefind)
msg=state-changed (from decodebin0)
msg=state-changed (from pipeline0)
msg=qos (from autovideosink0-actual-sink-overlay)
msg=tag (from autovideosink0-actual-sink-overlay)
Но когда трансляция завершена или сервер недоступен, нет никакой возможности понять, что данных сейчас нет: bus_call не вызывается. Как я могу понять, что трансляция закончена или сервер недоступен?


Я не силен в GStreamer. Но исходя из здравого смысла, если ты принимаешь данные по UDP, то отсутствие связи определяем по отсутствию принимаемых данных, т.е. по таймауту.

pathfinder ★★★★
()
Последнее исправление: pathfinder (всего исправлений: 1)

Решение. Надеюсь, кому-нибудь пригодится

#include <gst/gst.h>
#include <iostream>
#include <string>
#include <gst/app/gstappsink.h>
/*
 * Global state shared between main(), the GStreamer callbacks and the
 * 1 Hz watchdog timer.
 */

GstElement *pipeline;         // playback pipeline built from GST_PLAYER_UDP
GMainLoop *loop;              // main loop; quit on error/EOS or stalled stream
GstBus *bus;                  // pipeline message bus
GstMessage* msg;              // NOTE(review): apparently unused in this snippet
GstState state;               // last reported state of the "source" element (set in bus_cb)
GstClock* PipeClock;          // pipeline clock; used only as a liveness check in timeout_cb
bool gRetrievedConnCheck;     // NOTE(review): apparently unused in this snippet
int bufferCount;              // buffers pulled from the appsink so far
int preBufferCount;           // bufferCount observed at the previous watchdog tick
// Pipeline: udpsrc -> rtph264depay -> tee -> { decode/display branch, appsink counter branch }
#define GST_PLAYER_UDP "udpsrc name=source port=5555 caps=\"application/x-rtp, media=(string)video,clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96\" ! rtph264depay ! tee name=tee ! queue ! decodebin ! videoconvert ! autovideosink tee. ! queue ! appsink emit-signals=true name=appsink" 

// Forward declarations (TestingConnection is declared but not defined in this snippet).
static gboolean bus_cb (GstBus *bus, GstMessage *msg, gpointer *data); 
    static gboolean timeout_cb (gpointer user_data);
	static gboolean TestingConnection(gpointer user_data);
	static void ImmediateLoopQuit();
	static GstFlowReturn appsink_signal (GstElement* object,gpointer user_data);
	void VideoHasFinished();

int main() 
{ 
	GError* error;
	bufferCount = 0;
	preBufferCount = 0;
	gst_debug_set_threshold_from_string("*:3",TRUE);
        /* Initialize GStreamer */ 
        gst_init (NULL,NULL); 

        /* Build the pipeline */ 
        pipeline = gst_parse_launch (GST_PLAYER_UDP, NULL);
        
        loop = g_main_loop_new (NULL, FALSE); 

        /* Start playing */ 
        gst_element_set_state (pipeline, GST_STATE_PLAYING); 
		PipeClock = gst_pipeline_get_clock(GST_PIPELINE (pipeline));

        /* Wait until error or EOS */ 
         bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); 
         if(bus == NULL) 
         { 
                std::cout << "\nNo Bus\n"; 
                return 0; 
         } 
        gst_bus_add_signal_watch(bus); 
        g_signal_connect (bus, "message", G_CALLBACK (bus_cb), pipeline); 
        
		GstElement* appsink = gst_bin_get_by_name(GST_BIN(pipeline),"appsink");
		g_signal_connect(appsink, "new-sample", G_CALLBACK(appsink_signal),loop);

        g_timeout_add_seconds (1, timeout_cb, loop); 

        g_main_loop_run(loop); 
        
        /* Free resources */ 
        gst_object_unref (bus); 
        gst_element_set_state (pipeline, GST_STATE_NULL); 
        gst_object_unref (pipeline); 
		VideoHasFinished();
		return 0;
} 

/* Print a console banner announcing that playback has ended:
 * two separator lines, the message line, then three more separators. */
void VideoHasFinished()
{
	static const char* kRule = "\n-----------------------------------------------\n";

	for (int i = 0; i < 2; ++i)
		std::cout << kRule;

	std::cout << "\n---------Video Has Finished--------------------\n";

	for (int i = 0; i < 3; ++i)
		std::cout << kRule;
}

/* "new-sample" handler for the appsink tee branch.
 * Pulls each sample and counts its buffer in the global bufferCount so
 * the watchdog (timeout_cb) can detect a stalled UDP stream.
 * Returns GST_FLOW_EOS once the sink reports end-of-stream,
 * GST_FLOW_OK otherwise. */
GstFlowReturn appsink_signal (GstElement* object,gpointer user_data)
{
	GstSample* sample = NULL;

	/* The GStreamer C API does not throw; the guard only protects
	 * against exceptions leaking from elsewhere in the process. */
	try
	{
		g_signal_emit_by_name(object, "pull-sample", &sample);

		if (sample != NULL)
		{
			/* Each buffer is the "heartbeat" the watchdog looks for. */
			if (gst_sample_get_buffer(sample) != NULL)
				++bufferCount;

			/* was: re-checked sample != NULL inside this branch */
			gst_sample_unref(sample);
		}
		else
		{
			std::cout << "\nNo Sample\n";
		}

		if (gst_app_sink_is_eos((GstAppSink*)object))
		{
			std::cout << "\nEos Received\n";
			return GST_FLOW_EOS;
		}
	}
	catch (const std::exception& ex)   /* was caught by value (slicing) and swallowed */
	{
		std::cout << "\nException in appsink_signal: " << ex.what() << "\n";
	}
	return GST_FLOW_OK;
}


/**********************
 * Bus message handler.
 * ERROR/EOS: stop the pipeline, quit the main loop and remove the
 * watch (return FALSE).  STATE_CHANGED from the "source" element:
 * publish the new state in the global `state` for timeout_cb().
 ******************/ 
gboolean bus_cb (GstBus *bus, GstMessage *msg, gpointer *data) { 
  switch (GST_MESSAGE_TYPE (msg)) 
  { 
    case GST_MESSAGE_ERROR: 
    { 
      GError *err; 
      gchar *debug; 

      gst_message_parse_error (msg, &err, &debug); 
      g_print ("Error: %s\n", err->message); 
      g_error_free (err); 
      g_free (debug); 
      gst_element_set_state (pipeline, GST_STATE_READY); 
      g_main_loop_quit (loop); 
      return FALSE;   /* remove this watch */
    } 
    case GST_MESSAGE_EOS: 
      /* end-of-stream */ 
      gst_element_set_state (pipeline, GST_STATE_READY); 
      g_main_loop_quit (loop); 
      return FALSE;
    case GST_MESSAGE_STATE_CHANGED: 
    { 
      GstElement* source = gst_bin_get_by_name(GST_BIN(pipeline),"source"); 

      if (GST_MESSAGE_SRC (msg) == GST_OBJECT(source)) { 
        GstState old_state, new_state, pending_state; 
        gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
        state = new_state;   /* read by the watchdog */
      }
      /* was: unref'd only inside the if above, leaking a reference for
       * every state-changed message from any other element */
      if (source != NULL)
        gst_object_unref(source); 
      break; 
    } 
    default: 
      break; 
  }
  return TRUE;
} 

/* 1 Hz watchdog.  While the source element is READY/PLAYING, compares
 * bufferCount with its value at the previous tick; after 4 consecutive
 * ticks (~4 s) without new buffers the stream is considered dead and
 * the main loop is asked to quit.  Returns TRUE to keep the timeout
 * installed, FALSE only once the pipeline reached the NULL state. */
gboolean timeout_cb (gpointer user_data) 
{ 
	static int SameBufferCount = 0;   /* consecutive ticks without new buffers */

	if (!PipeClock)
	{
		std::cout << "No Clock";
		/* was: fell off the end of the function here — undefined
		 * behaviour; keep the watchdog running instead */
		return TRUE;
	}

	switch (state) 
	{ 
	case GST_STATE_READY: 
		std::cout << "\nSTATE Ready\n"; 
		/* fall through: run the stall check in READY too, so an
		 * unreachable server is also detected */
	case GST_STATE_PLAYING: 
		std::cout << "\nSTATE Playing\n";
		if (SameBufferCount == 4)
			ImmediateLoopQuit();          /* ~4 s of silence: give up */
		else if (bufferCount == preBufferCount)
			++SameBufferCount;
		else
		{
			preBufferCount = bufferCount;
			SameBufferCount = 0;
		}
		break; 
	case GST_STATE_PAUSED: 
		std::cout << "\nSTATE Paused\n";
		break; 
	case GST_STATE_NULL: 
		std::cout << "\nSTATE Null\n";
		return FALSE;                     /* pipeline is down: stop polling */
	default: 
		break; 
	} 
	return TRUE; 
}

/* Ask the global main loop to stop; g_main_loop_run() in main() then
 * returns and the pipeline is torn down. */
void ImmediateLoopQuit()
{
	g_main_loop_quit (loop);
}

Ya-NET
() автор топика
Вы не можете добавлять комментарии в эту тему. Тема перемещена в архив.