gstreamer,在 winforms(和 WPF)中渲染 rtspsrc

gstreamer, rendering rtspsrc in winforms (and WPF)

我正在尝试编写的应用程序基于网络上的视频服务器获取视频流,并将其显示在 winforms window 中(稍后我希望在 WPF 中托管相同类型的控件) .我正在使用 gstreamer-sharp,因为我的应用程序基于 c#.net。

我参考(原帖中链接的)代码示例成功地让 videotestsrc 工作,并且能够使用 VideoOverlayAdapter 和一组 WinForms 面板创建 videotestsrc 的多个实例,根据需要显示在窗口中。

当我开始让 rtspsrc 做同样的事情时,我自然遇到了一些正在努力克服的障碍,下面是我的类的代码。

我认为不应在初始化代码中直接 link rtspsrc,而是需要在 rtspsrc 的新 pad 出现时把它 link 到下一个元素(在本例中为 rtph264depay),而这正是我遇到麻烦的地方。

PadAdded 事件似乎有时会在启动程序后的几秒钟内触发,有时根本不会触发?服务器在基本教程(第 1 部分)的 gstreamer-sharp 版本上运行良好,并且具有良好的延迟(很容易小于 300 毫秒,但一旦我的应用程序运行,我需要进行玻璃对玻璃测试)。

此外,一旦 PadAdded 事件最终触发,我在尝试把新 pad link 到 rtph264depay 的 sink pad 时会得到一个 NOFORMAT 状态。

我还注意到我似乎没有收到 prepare-window-handle 总线同步消息(我本打算像 gstVideoOverlay 示例中那样在收到该消息时设置 VideoOverlayAdapter),所以即使 pad link 成功了,视频也不会输出到我需要的 window handle。

我没能找到这个特殊问题(rtspsrc pad 没有 linking 到 rtph264depay sink pad)因为类似的问题似乎是关于 linking 其他元素在一起。

根据调试消息,初始化代码中剩余元素的初始化 link 成功。

最终目标是将帧放入 OpenCV/Emgu 并进行一些分析和基本的叠加工作。

如有任何帮助,我们将不胜感激。

非常感谢!

/// <summary>
/// class to create a gstreamer pipeline based on an rtsp stream at the provided URL
/// </summary>
class gstPipeline2
{
    // elements for the pipeline
    private Element rtspsrc, rtph264depay, decoder, videoConv, videoSink;
    private System.Threading.Thread mainGLibThread;
    private GLib.MainLoop mainLoop;

    // the window handle (passed in)
    private IntPtr windowHandle;
    // our pipeline
    private Pipeline currentPipeline = null;

    /// <summary>
    /// Create a new gstreamer pipeline rendering the stream at URL into the provided window handle 
    /// </summary>
    /// <param name="WindowHandle">The handle of the window to render to </param>
    /// <param name="Url">The url of the video stream</param>
    public gstPipeline2(IntPtr WindowHandle, string Url)
    {
        windowHandle = WindowHandle;    // save the target window handle for Bus_SyncMessage below

        // initialise the gstreamer library and start a GLib main loop on a worker
        // thread; the loop is what dispatches bus signals/messages to our handlers
        Gst.Application.Init(); 
        mainLoop = new GLib.MainLoop();
        mainGLibThread = new System.Threading.Thread(mainLoop.Run);
        mainGLibThread.IsBackground = true; // don't keep the process alive on app exit
        mainGLibThread.Start();

        // create each element now for the pipeline
        // starting with the rtspsrc
        rtspsrc = ElementFactory.Make("rtspsrc", "udpsrc0");  // create an rtsp source
        rtspsrc["location"] = Url;   // and set its location (the source of the data)
        rtph264depay = ElementFactory.Make("rtph264depay", "rtph264depay0");    
        decoder = ElementFactory.Make("avdec_h264", "decoder0");    
        videoConv = ElementFactory.Make("videoconvert", "videoconvert0");   
        videoSink = ElementFactory.Make("autovideosink", "sink0");  // and finally the sink to render the video (redirected to the required window handle below in Bus_SyncMessage() ) 

        // create our pipeline which links all the elements together into a valid data flow
        currentPipeline = new Pipeline("pipeline");
        currentPipeline.Add(rtspsrc, rtph264depay, decoder, videoConv, videoSink); // add the required elements into it

        // statically link everything downstream of the depayloader now; rtspsrc's
        // source pads only exist after stream negotiation, so that link is made
        // dynamically in Rtspsrc_PadAdded
        if(!rtph264depay.Link(decoder))
            System.Diagnostics.Debug.WriteLine("rtph264depay could not be linked to decoder (bad)");
        else
            System.Diagnostics.Debug.WriteLine("rtph264depay linked to decoder (good)");

        if (!decoder.Link(videoConv))
            System.Diagnostics.Debug.WriteLine("decoder could not be linked to videoconvert (bad)");
        else
            System.Diagnostics.Debug.WriteLine("decoder linked to videoconvert (good)");

        if (!videoConv.Link(videoSink))
            System.Diagnostics.Debug.WriteLine("videoconvert could not be linked to autovideosink (bad)");
        else
            System.Diagnostics.Debug.WriteLine("videoconvert linked to autovideosink (good)");

        rtspsrc.PadAdded += Rtspsrc_PadAdded; // subscribe to the PadAdded event so we can link new pads (sources of data?) to the depayloader when they arrive

        // subscribe to the messaging system of the bus and pipeline so we can monitor status as we go
        Bus bus = currentPipeline.Bus;
        bus.AddSignalWatch();
        bus.Message += Bus_Message;

        bus.EnableSyncMessageEmission();
        bus.SyncMessage += Bus_SyncMessage;

        // finally set the state of the pipeline running so we can get data
        var setStateReturn = currentPipeline.SetState(State.Null);
        System.Diagnostics.Debug.WriteLine("SetStateNULL returned: " + setStateReturn.ToString());
        setStateReturn = currentPipeline.SetState(State.Ready);
        System.Diagnostics.Debug.WriteLine("SetStateReady returned: " + setStateReturn.ToString());
        setStateReturn = currentPipeline.SetState(State.Playing);
        System.Diagnostics.Debug.WriteLine("SetStatePlaying returned: " + setStateReturn.ToString());
    }

    /// <summary>
    /// Handle a new source pad appearing on rtspsrc and link it to the H264 depayloader.
    /// rtspsrc creates one source pad per negotiated RTP stream (video, audio, and
    /// RTCP session pads can all appear), so only H264 video pads are linked here;
    /// attempting to link anything else to rtph264depay fails with NOFORMAT.
    /// </summary>
    private void Rtspsrc_PadAdded(object o, PadAddedArgs args)
    {
        System.Diagnostics.Debug.WriteLine("Rtspsrc_PadAdded: called with new pad named: " + args.NewPad.Name);

        // a pad has been added to the source so we need to link it to the rest of the pipeline to ultimately display it onscreen
        Pad sinkPad = rtph264depay.GetStaticPad("sink");   // get the sink pad of the depayloader so we can link the new pad to it
        System.Diagnostics.Debug.WriteLine("Rtspsrc_PadAdded: rtps264depay sink pad returned: " + sinkPad.Name);

        if (sinkPad.IsLinked)
        {
            // depayloader already has its video stream; ignore any further pads (e.g. audio/RTCP)
            System.Diagnostics.Debug.WriteLine("Rtspsrc_PadAdded: depayloader sink already linked, ignoring pad");
            return;
        }

        // inspect the new pad's caps: only link pads carrying H264 video
        Caps caps = args.NewPad.CurrentCaps;
        if (caps != null && caps.Size > 0)
        {
            Structure s = caps.GetStructure(0);
            string media = s.GetString("media");            // "video"/"audio" on rtspsrc RTP pads
            string encoding = s.GetString("encoding-name"); // e.g. "H264"
            if ((media != null && media != "video") || (encoding != null && encoding != "H264"))
            {
                System.Diagnostics.Debug.WriteLine("Rtspsrc_PadAdded: skipping non-H264-video pad (media=" + media + ", encoding=" + encoding + ")");
                return;
            }
        }

        PadLinkReturn ret = args.NewPad.Link(sinkPad);
        System.Diagnostics.Debug.WriteLine("Rtspsrc_PadAdded: link attempt returned: " + ret.ToString());
    }

    /// <summary>
    /// Stop the pipeline (releasing the RTSP session and rendering resources)
    /// and shut down the GLib main loop thread.
    /// </summary>
    public void killProcess()
    {
        // tear down the pipeline first; quitting the loop alone leaves the
        // stream running and leaks the RTSP session/window resources
        if (currentPipeline != null)
            currentPipeline.SetState(State.Null);
        mainLoop.Quit();
    }

    /// <summary>
    /// Synchronous bus handler: redirects video output into our window handle
    /// when the sink announces it is about to create its output window.
    /// </summary>
    private void Bus_SyncMessage(object o, SyncMessageArgs args)
    {
        if (Gst.Video.Global.IsVideoOverlayPrepareWindowHandleMessage(args.Message))
        {
            if (args.Message.Src != null)
            {
                // null check must come before any Src dereference
                System.Diagnostics.Debug.WriteLine("Bus_SyncMessage: Message prepare window handle received by: " + args.Message.Src.Name + " " + args.Message.Src.GetType().ToString());

                // these checks were in the testvideosrc example and failed, args.Message.Src is always Gst.Element???
                if (args.Message.Src is Gst.Video.VideoSink)
                    System.Diagnostics.Debug.WriteLine("Bus_SyncMessage: source is VideoSink");
                else
                    System.Diagnostics.Debug.WriteLine("Bus_SyncMessage: source is NOT VideoSink");

                if (args.Message.Src is Gst.Bin)
                    System.Diagnostics.Debug.WriteLine("Bus_SyncMessage: source is Bin");
                else
                    System.Diagnostics.Debug.WriteLine("Bus_SyncMessage: source is NOT Bin");

                try
                {
                    // best effort: not all sinks expose this property
                    args.Message.Src["force-aspect-ratio"] = true;
                }
                catch (PropertyNotFoundException) { }

                try
                {
                    // hand our window handle to the overlay so the video renders into our panel
                    Gst.Video.VideoOverlayAdapter adapter = new VideoOverlayAdapter(args.Message.Src.Handle);
                    adapter.WindowHandle = windowHandle;
                    adapter.HandleEvents(true);
                    System.Diagnostics.Debug.WriteLine("Bus_SyncMessage: Handle passed to adapter: " + windowHandle.ToString());
                }
                catch (Exception ex) { System.Diagnostics.Debug.WriteLine("Bus_SyncMessage: Exception Thrown (overlay stage): " + ex.Message); }
            }
        }
        else
        {
            // NOTE(review): ParseInfo is only defined for Info messages; on other
            // message types the parsed string may be empty — diagnostic only
            string info;
            IntPtr prt;
            args.Message.ParseInfo(out prt, out info);
            System.Diagnostics.Debug.WriteLine("Bus_SyncMessage: " + args.Message.Type.ToString() + " - " + info);
        }
    }

    /// <summary>
    /// Asynchronous bus handler: logs errors, stream status and state changes for diagnostics.
    /// </summary>
    private void Bus_Message(object o, MessageArgs args)
    {
        var msg = args.Message;
        switch (msg.Type)
        {
            case MessageType.Error:
                // extract the actual error and its debug detail rather than the raw message object
                GLib.GException err;
                string debug;
                msg.ParseError(out err, out debug);
                System.Diagnostics.Debug.WriteLine("Bus_Message: Error received: " + err.Message + " ; debug: " + debug);
                break;
            case MessageType.StreamStatus:
                Gst.StreamStatusType status;
                Element theOwner;
                msg.ParseStreamStatus(out status, out theOwner);
                System.Diagnostics.Debug.WriteLine("Bus_Message: Case SteamingStatus: status is: " + status + " ; Owner is: " + theOwner.Name);
                break;
            case MessageType.StateChanged:
                State oldState, newState, pendingState;
                msg.ParseStateChanged(out oldState, out newState, out pendingState);
                if (newState == State.Paused)
                    args.RetVal = false;
                System.Diagnostics.Debug.WriteLine("Bus_Message: Pipeline state changed from {0} to {1}: ; Pending: {2}", Element.StateGetName(oldState), Element.StateGetName(newState), Element.StateGetName(pendingState));
                break;
            case MessageType.Element:
                System.Diagnostics.Debug.WriteLine("Bus_Message: Element message: {0}", args.Message.ToString());
                break;
            default:
                System.Diagnostics.Debug.WriteLine("Bus_Message: HandleMessage received msg of type: {0}", msg.Type);
                break;
        }
        args.RetVal = true;
    }
}

好的,我设法克服了我遇到的问题。

第一个问题(添加的 pad 没有被一致地调用)似乎通过构建 x64 而不是任何 cpu 或 x86 来解决。我怀疑我的 gstreamer 库安装没有正确完成。

第二个问题(链接新 pad 时为 NOFORMAT)需要做更多的工作。最后我听从了 Florian 的建议,改用 uridecodebin 作为源,并将新的 pad 直接链接到 autovideosink……中间没有其他元素。

现在 PadAdded 事件每次都会一致触发,prepare-window-handle 总线同步消息也每次都会发出。我现在有四个独立的 IP 流进入四个 WinForms 面板,延迟良好(玻璃到玻璃的延迟仍在测试中)。

为了确保延迟得到(某种程度的)调整,我不得不深入 uridecodebin 的 source-setup 信号,假定源的类型为 rtspsrc,然后设置它的 "latency" 属性。下面的代码没有验证源类型,因此效果可能因环境而异(YMMV),您可能会在此处遇到异常。

请参阅下面的 class 源代码,它适用于我(针对 x64 编译)。

希望这对那里的任何人都有帮助。

现在开始使用 appsink!! :)

/// <summary>
/// class to create a gstreamer pipeline based on an rtsp stream at the provided URL
/// </summary>
class gstPipeline2
{
    // elements for the pipeline
    private Element uriDecodeBin, videoSink;
    private System.Threading.Thread mainGLibThread;
    private GLib.MainLoop mainLoop;

    // the window handle (passed in)
    private IntPtr windowHandle;
    // our pipeline
    private Pipeline currentPipeline = null;

    /// <summary>
    /// Create a new gstreamer pipeline rendering the stream at URL into the provided window handle 
    /// </summary>
    /// <param name="Url">The url of the video stream</param>
    /// <param name="WindowHandle">The handle of the window to render to </param>
    public gstPipeline2(string Url, IntPtr WindowHandle)
    {
        windowHandle = WindowHandle;    // save the target window handle for Bus_SyncMessage below

        // initialise the gstreamer library and start a GLib main loop on a worker
        // thread; the loop is what dispatches bus signals/messages to our handlers
        Gst.Application.Init();

        mainLoop = new GLib.MainLoop();
        mainGLibThread = new System.Threading.Thread(mainLoop.Run);
        mainGLibThread.IsBackground = true; // don't keep the process alive on app exit
        mainGLibThread.Start();

        // create each element now for the pipeline
        uriDecodeBin = ElementFactory.Make("uridecodebin", "uriDecodeBin0");  // create an uridecodebin (which handles most of the work for us!!)
        uriDecodeBin["uri"] = Url;   // and set its location (the source of the data)
        videoSink = ElementFactory.Make("autovideosink", "sink0");  // and finally the sink to render the video (redirected to the required window handle below in Bus_SyncMessage() ) 

        // create our pipeline which links all the elements together into a valid data flow
        currentPipeline = new Pipeline("pipeline");
        currentPipeline.Add(uriDecodeBin, videoSink); // add the required elements into it

        // uridecodebin's source pads only exist once the stream is negotiated and
        // decoded, so the link to the sink is made dynamically in uriDecodeBin_PadAdded
        uriDecodeBin.PadAdded += uriDecodeBin_PadAdded;
        uriDecodeBin.Connect("source-setup", SourceSetup);  // subscribe to the "source-setup" signal, not quite done in the usual C# eventing way but treat it as essentially the same

        // subscribe to the messaging system of the bus and pipeline so we can monitor status as we go
        Bus bus = currentPipeline.Bus;
        bus.AddSignalWatch();
        bus.Message += Bus_Message;

        bus.EnableSyncMessageEmission();
        bus.SyncMessage += Bus_SyncMessage;

        // finally set the state of the pipeline running so we can get data
        var setStateReturn = currentPipeline.SetState(State.Null);
        System.Diagnostics.Debug.WriteLine("SetStateNULL returned: " + setStateReturn.ToString());
        setStateReturn = currentPipeline.SetState(State.Ready);
        System.Diagnostics.Debug.WriteLine("SetStateReady returned: " + setStateReturn.ToString());
        setStateReturn = currentPipeline.SetState(State.Playing);
        System.Diagnostics.Debug.WriteLine("SetStatePlaying returned: " + setStateReturn.ToString());
    }

    /// <summary>
    /// Handle a new decoded pad appearing on uridecodebin and link it to the video sink.
    /// uridecodebin exposes one pad per decoded stream, which can include audio, so
    /// only raw video pads are linked here; linking an audio pad to autovideosink
    /// would fail with NOFORMAT.
    /// </summary>
    private void uriDecodeBin_PadAdded(object o, PadAddedArgs args)
    {
        System.Diagnostics.Debug.WriteLine("uriDecodeBin_PadAdded: called with new pad named: " + args.NewPad.Name);

        // a pad has been added to the source so we need to link it to the rest of the pipeline to ultimately display it onscreen
        Pad sinkPad = videoSink.GetStaticPad("sink");   // get the sink pad of the video sink so we can link the new pad to it
        System.Diagnostics.Debug.WriteLine("uriDecodeBin_PadAdded: queue pad returned: " + sinkPad.Name);

        if (sinkPad.IsLinked)
        {
            // sink already has its video stream; ignore any further pads (e.g. audio)
            System.Diagnostics.Debug.WriteLine("uriDecodeBin_PadAdded: video sink already linked, ignoring pad");
            return;
        }

        // inspect the new pad's caps: only link pads carrying video
        Caps caps = args.NewPad.CurrentCaps;
        if (caps != null && caps.Size > 0 && !caps.GetStructure(0).Name.StartsWith("video"))
        {
            System.Diagnostics.Debug.WriteLine("uriDecodeBin_PadAdded: skipping non-video pad with caps: " + caps.GetStructure(0).Name);
            return;
        }

        PadLinkReturn ret = args.NewPad.Link(sinkPad);

        System.Diagnostics.Debug.WriteLine("uriDecodeBin_PadAdded: link attempt returned: " + ret.ToString());
    }

    /// <summary>
    /// "source-setup" signal handler: tune the latency of the source element that
    /// uridecodebin created internally. Only rtspsrc (and similar) expose a
    /// "latency" property, so the assignment is guarded rather than assumed.
    /// </summary>
    void SourceSetup(object sender, GLib.SignalArgs args)
    {
        var source = (Element)args.Args[0];
        System.Diagnostics.Debug.WriteLine("SourceSetup: source is named: " + source.Name + ", and is of type: " + source.NativeType.ToString());
        try
        {
            source["latency"] = 0;  // minimise buffering latency on rtspsrc-like sources
        }
        catch (PropertyNotFoundException)
        {
            // source is not rtspsrc (or lacks a latency property) - leave its defaults alone
            System.Diagnostics.Debug.WriteLine("SourceSetup: source has no latency property, leaving defaults");
        }
    }

    /// <summary>
    /// Stop the pipeline (releasing the stream and rendering resources)
    /// and shut down the GLib main loop thread.
    /// </summary>
    public void killProcess()
    {
        // tear down the pipeline first; quitting the loop alone leaves the
        // stream running and leaks the session/window resources
        if (currentPipeline != null)
            currentPipeline.SetState(State.Null);
        mainLoop.Quit();
    }

    /// <summary>
    /// Synchronous bus handler: redirects video output into our window handle
    /// when the sink announces it is about to create its output window.
    /// </summary>
    private void Bus_SyncMessage(object o, SyncMessageArgs args)
    {
        if (Gst.Video.Global.IsVideoOverlayPrepareWindowHandleMessage(args.Message))
        {
            if (args.Message.Src != null)
            {
                // null check must come before any Src dereference
                System.Diagnostics.Debug.WriteLine("Bus_SyncMessage: Message prepare window handle received by: " + args.Message.Src.Name + " " + args.Message.Src.GetType().ToString());

                // these checks were in the testvideosrc example and failed, args.Message.Src is always Gst.Element???
                if (args.Message.Src is Gst.Video.VideoSink)
                    System.Diagnostics.Debug.WriteLine("Bus_SyncMessage: source is VideoSink");
                else
                    System.Diagnostics.Debug.WriteLine("Bus_SyncMessage: source is NOT VideoSink");

                if (args.Message.Src is Gst.Bin)
                    System.Diagnostics.Debug.WriteLine("Bus_SyncMessage: source is Bin");
                else
                    System.Diagnostics.Debug.WriteLine("Bus_SyncMessage: source is NOT Bin");

                try
                {
                    // best effort: not all sinks expose this property
                    args.Message.Src["force-aspect-ratio"] = true;
                }
                catch (PropertyNotFoundException) { }

                try
                {
                    // hand our window handle to the overlay so the video renders into our panel
                    Gst.Video.VideoOverlayAdapter adapter = new VideoOverlayAdapter(args.Message.Src.Handle);
                    adapter.WindowHandle = windowHandle;
                    adapter.HandleEvents(true);
                    System.Diagnostics.Debug.WriteLine("Bus_SyncMessage: Handle passed to adapter: " + windowHandle.ToString());
                }
                catch (Exception ex) { System.Diagnostics.Debug.WriteLine("Bus_SyncMessage: Exception Thrown (overlay stage): " + ex.Message); }
            }
        }
        else
        {
            // NOTE(review): ParseInfo is only defined for Info messages; on other
            // message types the parsed string may be empty — diagnostic only
            string info;
            IntPtr prt;
            args.Message.ParseInfo(out prt, out info);
            System.Diagnostics.Debug.WriteLine("Bus_SyncMessage: " + args.Message.Type.ToString() + " - " + info);
        }
    }

    /// <summary>
    /// Asynchronous bus handler: logs errors, stream status and state changes for diagnostics.
    /// </summary>
    private void Bus_Message(object o, MessageArgs args)
    {
        var msg = args.Message;
        switch (msg.Type)
        {
            case MessageType.Error:
                // extract the actual error and its debug detail rather than the raw message object
                GLib.GException err;
                string debug;
                msg.ParseError(out err, out debug);
                System.Diagnostics.Debug.WriteLine("Bus_Message: Error received: " + err.Message + " ; debug: " + debug);
                break;
            case MessageType.StreamStatus:
                Gst.StreamStatusType status;
                Element theOwner;
                msg.ParseStreamStatus(out status, out theOwner);
                System.Diagnostics.Debug.WriteLine("Bus_Message: Case SteamingStatus: status is: " + status + " ; Owner is: " + theOwner.Name);
                break;
            case MessageType.StateChanged:
                State oldState, newState, pendingState;
                msg.ParseStateChanged(out oldState, out newState, out pendingState);
                if (newState == State.Paused)
                    args.RetVal = false;
                System.Diagnostics.Debug.WriteLine("Bus_Message: Pipeline state changed from {0} to {1}: ; Pending: {2}", Element.StateGetName(oldState), Element.StateGetName(newState), Element.StateGetName(pendingState));
                break;
            case MessageType.Element:
                System.Diagnostics.Debug.WriteLine("Bus_Message: Element message: {0}", args.Message.ToString());
                break;
            default:
                System.Diagnostics.Debug.WriteLine("Bus_Message: HandleMessage received msg of type: {0}", msg.Type);
                break;
        }
        args.RetVal = true;
    }
}