Is it possible to stream an existing gstreamer pipeline through gstrtspserver - python

So I have a fully working pipeline done in Python that I am currently just dumping into an autovideosink. I want to be able to stream through RTSP, so I've been looking at gstrtspserver, but from what I can tell gstrtspserver only uses a gst_parse_launch-like function to construct its pipeline, as in this example:
import gi
gi.require_version('Gst','1.0')
gi.require_version('GstRtspServer','1.0')
from gi.repository import GLib, Gst, GstRtspServer
Gst.init(None)
mainloop = GLib.MainLoop()
server = GstRtspServer.RTSPServer()
mounts = server.get_mount_points()
factory = GstRtspServer.RTSPMediaFactory()
factory.set_launch('( videotestsrc pattern=ball is-live=1 ! x264enc speed-preset=ultrafast tune=zerolatency ! rtph264pay name=pay0 pt=96 )')
mounts.add_factory("/test", factory)
server.attach(None)
print("stream ready at rtsp://127.0.0.1:8554/test")
mainloop.run()
Is there a way to stream an existing pipeline through RTSP?

You could use gst_rtsp_media_take_pipeline(), which takes an existing pipeline.
You would have to derive a class from GstRTSPMediaFactory to return that new GstRTSPMedia type.
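In Python, the factory-derivation route usually boils down to overriding do_create_element() and handing back the bin you already built. Here is a minimal sketch of that idea, assuming the pre-built pipeline ends in a payloader named pay0 (the class name, mount point, and pipeline string are illustrative, not from the answer above):

import gi
gi.require_version('Gst', '1.0')
gi.require_version('GstRtspServer', '1.0')
from gi.repository import GLib, Gst, GstRtspServer

Gst.init(None)

class ExistingPipelineFactory(GstRtspServer.RTSPMediaFactory):
    # Serves a bin that was built elsewhere instead of a launch string.
    def __init__(self, bin_):
        super().__init__()
        self._bin = bin_  # pre-built bin; must contain payloaders named pay0, pay1, ...

    def do_create_element(self, url):
        # The server wraps whatever bin we return into its own media pipeline.
        return self._bin

# Build the bin once, any way you like (by hand or via parse_launch).
existing = Gst.parse_launch(
    '( videotestsrc is-live=true ! x264enc tune=zerolatency '
    '! rtph264pay name=pay0 pt=96 )')

factory = ExistingPipelineFactory(existing)
factory.set_shared(True)  # reuse the single media for every client

server = GstRtspServer.RTSPServer()
server.get_mount_points().add_factory('/test', factory)
server.attach(None)
GLib.MainLoop().run()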

Related

GStreamer, Tee in RTSP server

New to GStreamer, trying to create an RTSP server that consumes a source once per output stream. Code follows.

In the factory's do_create_element(), if I use source_pipeline as a return value, I am able to connect to the server and consume the stream. However, as expected, if I connect multiple clients, the server crashes with an error saying the src pad is already connected.

My idea was to use the tee element, add a queue after it, and wrap the queue in a pipeline (since I need a GstBin element to give back from the factory). However, this does not work. When I connect a first client to the server, nothing happens server-side (as in, no crashes; the function is called and exits successfully). Client-side, however, no stream is read, and it fails with an 'SDP contains no streams' error (see the console output after the code).

I tried connecting the tee manually to the queue (line left in a comment) instead of using the pipeline to do it, but nothing changes. I suspect the queue needs something to consume it, but I'm not sure how/what to use (or if that's the actual issue).
Here is a minimal example:
import gi

gi.require_version("Gst", "1.0")
gi.require_version("GstRtspServer", "1.0")
# noinspection PyUnresolvedReferences
from gi.repository import Gst, GstRtspServer, GObject


def create_source():
    s_src = "videotestsrc ! video/x-raw,rate=30,width=320,height=240,format=I420"
    s_h264 = "x264enc tune=zerolatency"
    pipeline_str = "( {s_src} ! queue max-size-buffers=1000 name=q_enc ! {s_h264} ! rtph264pay name=pay0 pt=96 )".format(
        **locals()
    )
    return Gst.parse_launch(pipeline_str)


def _main():
    GObject.threads_init()
    Gst.init(None)

    source = create_source()
    tee = Gst.ElementFactory.make("tee", "tee")

    source_pipeline = Gst.Pipeline()
    source_pipeline.add(source)
    source_pipeline.add(tee)
    source_pipeline.set_state(Gst.State.PLAYING)

    class Factory(GstRtspServer.RTSPMediaFactoryURI):
        def __init__(self):
            super(Factory, self).__init__()
            self._n = 0

        def do_create_element(self, url):  # -> GstBin (Pipeline)
            print("in [Factory] do_create_element")
            source_pipeline.set_state(Gst.State.PAUSED)

            stream_pipeline = Gst.Pipeline()
            # Create a new queue to buffer the tee's output
            q = Gst.ElementFactory.make("queue")
            stream_pipeline.add(q)

            source_pipeline.add(tee)
            source_pipeline.add(stream_pipeline)
            # tee.link(q)

            stream_pipeline.set_state(Gst.State.PLAYING)
            source_pipeline.set_state(Gst.State.PLAYING)
            return stream_pipeline

    factory = Factory()
    factory.set_shared(True)

    server = GstRtspServer.RTSPServer()
    server.get_mount_points().add_factory("/endpoint", factory)
    server.attach(None)

    loop = GObject.MainLoop()
    loop.run()


if __name__ == "__main__":
    _main()
Here is the shell output when connecting:
> gst-launch-1.0 rtspsrc location=rtsp://127.0.0.1:8554/endpoint ! rtph264depay ! h264parse ! mp4mux ! filesink location=file.mp4
Setting pipeline to PAUSED ...
Pipeline is live and does not need PREROLL ...
Progress: (open) Opening Stream
Pipeline is PREROLLED ...
Prerolled, waiting for progress to finish...
Progress: (connect) Connecting to rtsp://127.0.0.1:8554/endpoint
Progress: (open) Retrieving server options
Progress: (open) Retrieving media info
ERROR: from element /GstPipeline:pipeline0/GstRTSPSrc:rtspsrc0: Could not get/set settings from/on resource.
Additional debug info:
../gst/rtsp/gstrtspsrc.c(7637): gst_rtspsrc_setup_streams_start (): /GstPipeline:pipeline0/GstRTSPSrc:rtspsrc0:
SDP contains no streams
ERROR: pipeline doesn't want to preroll.
Setting pipeline to NULL ...
Freeing pipeline ...
Thanks ahead!

Problem with GStreamer and WPEWebKit in Python3

I want to create a program in Python that allows editing text that moves and shows on top of a video. I want to do that with a website overlay (WPEWebKit).
I have found some code to start a pipeline with wpe and gst-launch-1.0. The problem is that when I run gst-launch-1.0 -v wpesrc location="https://gstreamer.freedesktop.org" ! queue ! glimagesink (from https://gstreamer.freedesktop.org/documentation/wpe/wpesrc.html), it says WARNING: erroneous pipeline: no element "wpesrc". I have checked, and I have gstreamer1.0-plugins-bad installed.
The other challenge is that I want to create it in a Python application. Here's my code so far:
import threading
import time
import gi
gi.require_version("Gst", "1.0")
from gi.repository import Gst, GLib
Gst.init()
main_loop = GLib.MainLoop()
thread = threading.Thread(target=main_loop.run)
thread.start()
pipeline = Gst.parse_launch("videotestsrc ! decodebin ! videoconvert ! autovideosink")
pipeline.set_state(Gst.State.PLAYING)
try:
    while True:
        time.sleep(0.1)
except KeyboardInterrupt:
    pass
pipeline.set_state(Gst.State.NULL)
main_loop.quit()
I will probably rewrite this because I want it in a GTK application.

Where are Gstreamer bus log messages?

I am trying to stream an .mp4 file to an RTSP server using GStreamer in Python:
import sys

import gi

gi.require_version('Gst', '1.0')
gi.require_version('GstRtspServer', '1.0')
gi.require_version('GstRtsp', '1.0')
from gi.repository import Gst, GstRtspServer, GObject, GLib, GstRtsp

loop = GLib.MainLoop()
Gst.init(None)

file_path = "test.mp4"


class TestRtspMediaFactory(GstRtspServer.RTSPMediaFactory):
    def __init__(self):
        GstRtspServer.RTSPMediaFactory.__init__(self)

    def do_create_element(self, url):
        src_demux = f"filesrc location={file_path} ! qtdemux name=demux"
        h264_transcode = "demux.video_0"
        pipeline = "{0} {1} ! queue ! rtph264pay name=pay0 config-interval=1 pt=96".format(src_demux, h264_transcode)
        print("Element created: " + pipeline)
        self._pipeline = Gst.parse_launch(pipeline)

        def bus_handler(bus, message):
            print(message)

        self.bus = self._pipeline.get_bus()
        self.bus.connect('message', bus_handler)
        self.bus.add_signal_watch_full(1)
        return self._pipeline


class GstreamerRtspServer():
    def __init__(self):
        self.rtspServer = GstRtspServer.RTSPServer()
        factory = TestRtspMediaFactory()
        factory.set_shared(True)
        mountPoints = self.rtspServer.get_mount_points()
        self.address = '127.0.0.1'  # my RPi's local IP
        self.port = '8553'
        self.rtspServer.set_address(self.address)
        self.rtspServer.set_service(self.port)
        urlstr = "/user=&password=.sdp"
        url = GstRtsp.RTSPUrl.parse(urlstr)
        mountPoints.add_factory(urlstr, factory)
        self.rtspServer.attach(None)


if __name__ == '__main__':
    s = GstreamerRtspServer()
    loop.run()
However, I am trying to understand how to use the GStreamer bus to log messages like EOS, errors, and warnings, but I don't see any, even when I send EOS events and the streaming effectively stops:
s.rtspServer._pipeline._end_stream_event.set()
s.rtspServer._pipeline.send_event(Gst.Event.new_eos())
Am I using it properly? If not, what can I fix to properly log bus messages?
The following solution is based on this accepted but somewhat incomplete answer.
I found a way that does not require "manual" creation of pipeline elements; instead, it keeps the (in this scenario convenient) Gst.parse_launch(pipelineCmd) method and extends Gst.Bin to enable message debugging.
Here is the full example source code (check the comments for explanations):
#!/usr/bin/env python
import sys

import gi

gi.require_version('Gst', '1.0')
gi.require_version('GstRtspServer', '1.0')
from gi.repository import Gst, GstRtspServer, GObject, GLib

Gst.init(None)
loop = GLib.MainLoop()


# extended Gst.Bin that overrides do_handle_message and adds debugging
class ExtendedBin(Gst.Bin):
    def do_handle_message(self, message):
        if message.type == Gst.MessageType.ERROR:
            error, debug = message.parse_error()
            print("ERROR:", message.src.get_name(), ":", error.message)
            if debug:
                print("Debug info: " + debug)
        elif message.type == Gst.MessageType.EOS:
            print("End of stream")
        elif message.type == Gst.MessageType.STATE_CHANGED:
            oldState, newState, pendingState = message.parse_state_changed()
            print("State changed -> old:{}, new:{}, pending:{}".format(oldState, newState, pendingState))
        else:
            print("Some other message type: " + str(message.type))
        # call the base handler to enable message propagation
        Gst.Bin.do_handle_message(self, message)


class TestRtspMediaFactory(GstRtspServer.RTSPMediaFactory):
    def __init__(self):
        GstRtspServer.RTSPMediaFactory.__init__(self)

    def do_create_element(self, url):
        # set the mp4 file path in filesrc's location property
        src_demux = "filesrc location=/path/to/dir/test.mp4 ! qtdemux name=demux"
        h264_transcode = "demux.video_0"
        # uncomment the following line if video transcoding is necessary
        # h264_transcode = "demux.video_0 ! decodebin ! queue ! x264enc"
        pipelineCmd = "{0} {1} ! queue ! rtph264pay name=pay0 config-interval=1 pt=96".format(src_demux, h264_transcode)
        self.pipeline = Gst.parse_launch(pipelineCmd)
        print("Pipeline created: " + pipelineCmd)

        # creates an extended Gst.Bin with message debugging enabled
        extendedBin = ExtendedBin(name="extendedBin")

        # Gst.Pipeline inherits Gst.Bin and Gst.Element, so the following is possible
        extendedBin.add(self.pipeline)

        # creates a new pipeline and adds the extended bin to it
        self.extendedPipeline = Gst.Pipeline.new("extendedPipeline")
        self.extendedPipeline.add(extendedBin)

        return self.extendedPipeline


class GstreamerRtspServer(GstRtspServer.RTSPServer):
    def __init__(self):
        self.rtspServer = GstRtspServer.RTSPServer()
        self.factory = TestRtspMediaFactory()
        self.factory.set_shared(True)
        mountPoints = self.rtspServer.get_mount_points()
        mountPoints.add_factory("/stream", self.factory)
        self.rtspServer.attach(None)
        print("RTSP server is ready")


if __name__ == '__main__':
    s = GstreamerRtspServer()
    loop.run()
Please note that Gst.Pipeline actually inherits/extends Gst.Bin (and Gst.Element), so it is possible (no matter how strange it sounds) to add a pipeline to a bin.
This little "trick" saves time for us "lazy" programmers, letting us keep using command-line syntax parsing to create pipeline elements.
In some more complex scenarios, where parsing of command-line syntax is not applicable, the solution would be the following (a rough sketch follows the list):
- create an ExtendedBin,
- "manually" create elements with the Gst.ElementFactory.make method (and set any necessary properties),
- add the created elements to the ExtendedBin,
- link the elements,
- create a new pipeline and add the bin to it,
- use the pipeline where it is needed.
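A rough sketch of that manual route, reusing the ExtendedBin class from above (the element choices here are illustrative, not from the answer):

extendedBin = ExtendedBin(name="extendedBin")

# "manually" create elements and set their properties
src = Gst.ElementFactory.make("videotestsrc", "src")
enc = Gst.ElementFactory.make("x264enc", "enc")
pay = Gst.ElementFactory.make("rtph264pay", "pay0")
pay.set_property("pt", 96)

# add the elements to the extended bin, then link them in order
for element in (src, enc, pay):
    extendedBin.add(element)
src.link(enc)
enc.link(pay)

# wrap the bin in a new pipeline and use that wherever a pipeline is needed
pipeline = Gst.Pipeline.new("extendedPipeline")
pipeline.add(extendedBin)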

convert gstreamer pipeline to python code

I'm trying to convert a GStreamer pipeline to Python code using the gi library.
This is the pipeline, which runs successfully in the terminal:
gst-launch-1.0 rtspsrc location="rtsp://admin:123456#192.168.0.150:554/H264?ch=1&subtype=0&proto=Onvif" latency=300 ! rtph264depay ! h264parse ! nvv4l2decoder drop-frame-interval=1 ! nvvideoconvert ! video/x-raw,width=1920,height=1080,formate=I420 ! queue ! nveglglessink window-x=0 window-y=0 window-width=1080 window-height=720
But when running the same pipeline from Python code, there is no output window displaying the RTSP stream and also no error on the terminal. The terminal simply hangs until I press Ctrl+C.
This is the code that I'm using to run the GStreamer pipeline:
import gi

gi.require_version("Gst", "1.0")
from gi.repository import Gst, GObject


def main(device):
    GObject.threads_init()
    Gst.init(None)

    pipeline = Gst.Pipeline()

    source = Gst.ElementFactory.make("rtspsrc", "video-source")
    source.set_property("location", device)
    source.set_property("latency", 300)
    pipeline.add(source)

    depay = Gst.ElementFactory.make("rtph264depay", "depay")
    pipeline.add(depay)
    source.link(depay)

    parse = Gst.ElementFactory.make("h264parse", "parse")
    pipeline.add(parse)
    depay.link(parse)

    decoder = Gst.ElementFactory.make("nvv4l2decoder", "decoder")
    decoder.set_property("drop-frame-interval", 2)
    pipeline.add(decoder)
    parse.link(decoder)

    convert = Gst.ElementFactory.make("nvvideoconvert", "convert")
    pipeline.add(convert)
    decoder.link(convert)

    caps = Gst.Caps.from_string("video/x-raw,width=1920,height=1080,formate=I420")
    filter = Gst.ElementFactory.make("capsfilter", "filter")
    filter.set_property("caps", caps)
    pipeline.add(filter)
    convert.link(filter)

    queue = Gst.ElementFactory.make("queue", "queue")
    pipeline.add(queue)
    filter.link(queue)

    sink = Gst.ElementFactory.make("nveglglessink", "video-sink")
    sink.set_property("window-x", 0)
    sink.set_property("window-y", 0)
    sink.set_property("window-width", 1280)
    sink.set_property("window-height", 720)
    pipeline.add(sink)
    queue.link(sink)

    loop = GObject.MainLoop()
    pipeline.set_state(Gst.State.PLAYING)
    try:
        loop.run()
    except:
        pass
    pipeline.set_state(Gst.State.NULL)


if __name__ == "__main__":
    main("rtsp://admin:123456#192.168.0.150:554/H264?ch=1&subtype=0&proto=Onvif")
Does anyone know what the mistake is? Thank you!
The reason it doesn't work is that rtspsrc's source pads are so-called "Sometimes pads". The link here explains it quite well, but basically you cannot know upfront how many pads will become available on the rtspsrc, since this depends on the SDP provided by the RTSP server.
As such, you should listen to the "pad-added" signal of the rtspsrc, where you can link the rest of your pipeline to the source pad that just showed up in the callback.
So summarised:
def main(device):
    GObject.threads_init()
    Gst.init(None)

    pipeline = Gst.Pipeline()

    source = Gst.ElementFactory.make("rtspsrc", "video-source")
    source.set_property("location", device)
    source.set_property("latency", 300)
    source.connect("pad-added", on_rtspsrc_pad_added)
    pipeline.add(source)

    # We will add/link the rest of the pipeline later
    loop = GObject.MainLoop()
    pipeline.set_state(Gst.State.PLAYING)
    try:
        loop.run()
    except:
        pass
    pipeline.set_state(Gst.State.NULL)


def on_rtspsrc_pad_added(rtspsrc, pad, *user_data):
    # Create the rest of your pipeline here and link it
    depay = Gst.ElementFactory.make("rtph264depay", "depay")
    pipeline.add(depay)
    rtspsrc.link(depay)
    # and so on ....
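For completeness, here is a slightly fuller sketch of that callback, under the assumption that the downstream elements are created and added up front, and the new pad is linked directly (passing the depayloader as user data; this detail is mine, not part of the original answer):

def on_rtspsrc_pad_added(rtspsrc, pad, depay):
    # Link the freshly added source pad to the waiting depayloader.
    sink_pad = depay.get_static_pad("sink")
    if not sink_pad.is_linked():
        pad.link(sink_pad)

# ... and in main(), after creating and adding depay (and the rest):
# source.connect("pad-added", on_rtspsrc_pad_added, depay)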

Python Gstreamer record audio from mic and play immediately

I want to record audio from the mic and play it immediately through the same PC's speakers using GStreamer. In other words: make a wire between input and output, record a few samples and play them back immediately. I can record audio to an Ogg file with this code:
#!/usr/bin/env python
import gi

gi.require_version('Gst', '1.0')
from gi.repository import GObject, Gst, Gtk

GObject.threads_init()
Gst.init(None)

pipeline = Gst.Pipeline()

autoaudiosrc = Gst.ElementFactory.make("autoaudiosrc", "autoaudiosrc")
audioconvert = Gst.ElementFactory.make("audioconvert", "audioconvert")
vorbisenc = Gst.ElementFactory.make("vorbisenc", "vorbisenc")
oggmux = Gst.ElementFactory.make("oggmux", "oggmux")
filesink = Gst.ElementFactory.make("filesink", "filesink")

url = "1.ogg"
filesink.set_property("location", url)

pipeline.add(autoaudiosrc)
pipeline.add(audioconvert)
pipeline.add(vorbisenc)
pipeline.add(oggmux)
pipeline.add(filesink)

autoaudiosrc.link(audioconvert)
audioconvert.link(vorbisenc)
vorbisenc.link(oggmux)
oggmux.link(filesink)

pipeline.set_state(Gst.State.PLAYING)
Gtk.main()
But how can I play the audio while recording?
After audioconvert, you can add a tee and a queue to create a new branch.
You can have something like this:
autoaudiosrc ! audioconvert ! tee name="source" ! queue ! vorbisenc ! oggmux ! filesink location=file.ogg source. ! queue ! audioconvert ! alsasink
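Since the question uses Python, the same graph can be sketched with Gst.parse_launch (a sketch under the assumption that autoaudiosink works on your system; it is substituted for alsasink so the example is not ALSA-specific):

import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst, GLib

Gst.init(None)

# Same graph as the gst-launch line above: one tee branch records to
# Ogg/Vorbis, the other plays straight back out through the speakers.
pipeline = Gst.parse_launch(
    'autoaudiosrc ! audioconvert ! tee name=source '
    'source. ! queue ! vorbisenc ! oggmux ! filesink location=file.ogg '
    'source. ! queue ! audioconvert ! autoaudiosink')
pipeline.set_state(Gst.State.PLAYING)
GLib.MainLoop().run()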
