How to stream video over RTSP with GStreamer and Python?

I have some code that currently displays a video on screen using the Python GStreamer bindings. Once the player is open I can fast-forward the video by clicking the "Forward" button, which is very important to me, so I don't want to write the pipeline as a string and hand it to gst-launch through a parser.
What I would like to do now is send this video stream not only to the newly opened window but also (or, if both at once aren't possible, instead) over RTSP, so it can be opened in VLC or even in another client on the LAN. Is there a way to do this?
Sorry the code is long, but here it is:
import sys, os, time
import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst, GObject, Gtk
from gi.repository import GdkX11, GstVideo

class GTK_Main(object):

    def __init__(self):
        window = Gtk.Window(Gtk.WindowType.TOPLEVEL)
        window.set_title("Vorbis-Player")
        window.set_default_size(500, -1)
        window.connect("destroy", Gtk.main_quit, "WM destroy")
        vbox = Gtk.VBox()
        window.add(vbox)
        self.entry = Gtk.Entry()
        vbox.pack_start(self.entry, False, False, 0)
        hbox = Gtk.HBox()
        vbox.add(hbox)
        buttonbox = Gtk.HButtonBox()
        hbox.pack_start(buttonbox, False, False, 0)
        rewind_button = Gtk.Button("Rewind")
        rewind_button.connect("clicked", self.rewind_callback)
        buttonbox.add(rewind_button)
        self.button = Gtk.Button("Start")
        self.button.connect("clicked", self.start_stop)
        buttonbox.add(self.button)
        forward_button = Gtk.Button("Forward")
        forward_button.connect("clicked", self.forward_callback)
        buttonbox.add(forward_button)
        self.time_label = Gtk.Label()
        self.time_label.set_text("00:00 / 00:00")
        hbox.add(self.time_label)
        # Drawing area that the video is rendered into (on_sync_message below
        # attaches the video sink's window handle to it).
        self.movie_window = Gtk.DrawingArea()
        vbox.add(self.movie_window)
        window.show_all()

        self.player = Gst.ElementFactory.make("playbin", "player")
        bus = self.player.get_bus()
        bus.add_signal_watch()
        bus.enable_sync_message_emission()
        bus.connect("message", self.on_message)
        bus.connect("sync-message::element", self.on_sync_message)

    def start_stop(self, w):
        if self.button.get_label() == "Start":
            filepath = self.entry.get_text().strip()
            if os.path.isfile(filepath):
                filepath = os.path.realpath(filepath)
                self.button.set_label("Stop")
                self.player.set_property("uri", "file://" + filepath)
                self.player.set_state(Gst.State.PLAYING)
                time.sleep(1)
                self.forward_callback(60)
        else:
            self.player.set_state(Gst.State.NULL)
            self.button.set_label("Start")

    def on_message(self, bus, message):
        t = message.type
        if t == Gst.MessageType.EOS:
            self.player.set_state(Gst.State.NULL)
            self.button.set_label("Start")
        elif t == Gst.MessageType.ERROR:
            self.player.set_state(Gst.State.NULL)
            err, debug = message.parse_error()
            print("Error: %s" % err, debug)
            self.button.set_label("Start")

    def on_sync_message(self, bus, message):
        if message.get_structure().get_name() == 'prepare-window-handle':
            imagesink = message.src
            imagesink.set_property("force-aspect-ratio", True)
            imagesink.set_window_handle(self.movie_window.get_property('window').get_xid())

    def rewind_callback(self, w):
        rc, pos_int = self.player.query_position(Gst.Format.TIME)
        seek_ns = pos_int - 10 * 1000000000
        if seek_ns < 0:
            seek_ns = 0
        print("Backward: %d ns -> %d ns" % (pos_int, seek_ns))
        self.player.seek_simple(Gst.Format.TIME, Gst.SeekFlags.FLUSH, seek_ns)

    def forward_callback(self, w):
        rc, pos_int = self.player.query_position(Gst.Format.TIME)
        if type(w) == int:
            seek_ns = w * 1000000000
        else:
            seek_ns = pos_int + 10 * 1000000000
        print("Forward: %d ns -> %d ns" % (pos_int, seek_ns))
        self.player.seek_simple(Gst.Format.TIME, Gst.SeekFlags.FLUSH, seek_ns)

    def convert_ns(self, t):
        # This method was submitted by Sam Mason.
        # It's much shorter than the original one.
        s, ns = divmod(t, 1000000000)
        m, s = divmod(s, 60)

        if m < 60:
            return "%02i:%02i" % (m, s)
        else:
            h, m = divmod(m, 60)
            return "%i:%02i:%02i" % (h, m, s)


GObject.threads_init()
Gst.init(None)
GTK_Main()
Gtk.main()

I found this code in this tutorial.

I think this link has the solution you're looking for: https://dev59.com/rVYN5IYBdhLWcg3wdn8Y - Vítor Cézar
I managed to run the solution from that post; the problem was with reading the OpenCV frames. The streamed video had overlapping frames, but the appsrc part used together with gst-rtsp-server is probably the right approach... I'll try it again. - João Gondim
1 Answer


So in the end I got the linked solution working. My problem was that the width and height in the caps must have exactly the same values as the video you want to play, probably because of how the OpenCV frames are passed in. Also, the "is-live" property, which the linked solution sets to true because it uses a webcam, has to be set to false (or simply left out, since false is the default); otherwise the video starts with a black screen and some delay. The code ended up like this:

import cv2
import gi

gi.require_version('Gst', '1.0')
gi.require_version('GstRtspServer', '1.0')
from gi.repository import Gst, GstRtspServer, GObject


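# RTSPMediaFactory builds the media pipeline served to clients; the overridden
# do_create_element() below returns that pipeline by parsing launch_string.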
class SensorFactory(GstRtspServer.RTSPMediaFactory):
    def __init__(self, **properties):
        super(SensorFactory, self).__init__(**properties)
        self.cap = cv2.VideoCapture("path/to/video")
        self.number_frames = 0
        self.fps = 8
        self.duration = 1 / self.fps * Gst.SECOND  # duration of a frame in nanoseconds
        self.launch_string = 'appsrc name=source block=true format=GST_FORMAT_TIME ' \
                             'caps=video/x-raw,format=BGR,width=1280,height=720,framerate={}/1 ' \
                             '! videoconvert ! video/x-raw,format=I420 ' \
                             '! x264enc speed-preset=ultrafast tune=zerolatency ! queue ' \
                             '! rtph264pay config-interval=1 name=pay0 pt=96 '.format(self.fps)
        # streams to gst-launch-1.0 rtspsrc location=rtsp://localhost:8554/test latency=50 ! decodebin ! autovideosink

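    # 'need-data' is emitted by appsrc when its internal queue runs low; the
    # handler below reads one OpenCV frame, wraps it in a Gst.Buffer stamped
    # with a pts/duration derived from self.fps, and pushes it via 'push-buffer'.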
    def on_need_data(self, src, length):
        if self.cap.isOpened():
            ret, frame = self.cap.read()
            if ret:
                data = frame.tobytes()
                #print(data)
                buf = Gst.Buffer.new_allocate(None, len(data), None)
                buf.fill(0, data)
                buf.duration = self.duration
                timestamp = self.number_frames * self.duration
                buf.pts = buf.dts = int(timestamp)
                buf.offset = timestamp
                self.number_frames += 1
                retval = src.emit('push-buffer', buf)
                #print('pushed buffer, frame {}, duration {} ns, durations {} s'.format(self.number_frames,
                #                                                                       self.duration,
                #                                                                       self.duration / Gst.SECOND))
                if retval != Gst.FlowReturn.OK:
                    print(retval)

    def do_create_element(self, url):
        return Gst.parse_launch(self.launch_string)

    def do_configure(self, rtsp_media):
        self.number_frames = 0
        appsrc = rtsp_media.get_element().get_child_by_name('source')
        appsrc.connect('need-data', self.on_need_data)


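# The factory is mounted at /test and the server listens on GstRtspServer's
# default port (8554), so clients connect to rtsp://<host>:8554/test.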
class GstServer(GstRtspServer.RTSPServer):
    def __init__(self, **properties):
        super(GstServer, self).__init__(**properties)
        self.factory = SensorFactory()
        self.factory.set_shared(True)
        self.get_mount_points().add_factory("/test", self.factory)
        self.attach(None)


GObject.threads_init()
Gst.init(None)

server = GstServer()

loop = GObject.MainLoop()
loop.run()
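One more note: since the width, height and framerate in the appsrc caps must match the frames OpenCV actually delivers, it can help to read those values from the capture itself and substitute them into launch_string instead of hard-coding 1280x720 and 8 fps. A minimal sketch, using the same placeholder path as in the answer:

import cv2

cap = cv2.VideoCapture("path/to/video")           # same placeholder path as above
width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))    # e.g. 1280
height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))  # e.g. 720
fps = cap.get(cv2.CAP_PROP_FPS) or 8              # fall back to 8 fps if the file reports nothing
cap.release()
print(width, height, fps)

Once the server script is running, the stream can be opened with the gst-launch-1.0 line shown in the comment above, or in VLC via "Open Network Stream" with the URL rtsp://<server-ip>:8554/test (8554 is GstRtspServer's default port).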

It looks like you're overriding a method, RTSPMediaFactory.do_configure(), but it doesn't seem to be documented anywhere. Do you know where I can find more information about this? - balu
@balu, that override comes from the author of this link: https://dev59.com/rVYN5IYBdhLWcg3wdn8Y Maybe you can find some information in the GStreamer documentation. - João Gondim
Thanks, I'll ask there! - balu
