H.264 - Playing RTSP with python-gstreamer
I use GStreamer to play RTSP streams from IP cameras (like Axis). From the command line I use something like this:
gst-launch-0.10 rtspsrc location=rtsp://192.168.0.127/axis-media/media.amp latency=0 ! decodebin ! autovideosink
and it works fine.
I want to control it from a GUI written in PyGTK, using the GStreamer Python bindings. I've written this piece of code:
[...]
self.player = gst.Pipeline("player")
source = gst.element_factory_make("rtspsrc", "source")
source.set_property("location", "rtsp://192.168.0.127/axis-media/media.amp")
decoder = gst.element_factory_make("decodebin", "decoder")
sink = gst.element_factory_make("autovideosink", "sink")
self.player.add(source, decoder, sink)
gst.element_link_many(source, decoder, sink)

bus = self.player.get_bus()
bus.add_signal_watch()
bus.enable_sync_message_emission()
bus.connect("message", self.on_message)
bus.connect("sync-message::element", self.on_sync_message)
[...]
but it doesn't work; it quits with this message:
gst.element_link_many(source, decoder, sink)
gst.LinkError: failed to link source with decoder
I've tried to improve the CLI pipeline by handling H.264 explicitly:
gst-launch-0.10 -v rtspsrc location=rtsp://192.168.0.127/axis-media/media.amp ! rtph264depay ! ffdec_h264 ! xvimagesink
and implemented it in Python like this:
[...]
self.player = gst.Pipeline("player")
source = gst.element_factory_make("rtspsrc", "source")
depay = gst.element_factory_make("rtph264depay", "depay")
decoder = gst.element_factory_make("ffdec_h264", "decoder")
sink = gst.element_factory_make("xvimagesink", "output")
self.player.add(source, depay, decoder, sink)
gst.element_link_many(source, depay, decoder, sink)
[...]
but I got the same error :(
gst.LinkError: failed to link source with depay
There seems to be something wrong with the source (rtspsrc): the rest of the pipeline works with decodebin when I use a filesrc instead (which, of course, doesn't work with rtph264depay).
I don't understand why it doesn't work, because the same pipeline works from the CLI. Can any GStreamer experts help me?
Thanks in advance.
Regards,
i have "c" implementation of code looking for. think should simple convert "python"
//display RTSP streaming of video
//(c) 2011 enthusiasticgeek
// This code is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.

#include <string.h>
#include <math.h>
#include <gst/gst.h>
#include <glib.h>
#include <glib/gprintf.h>   /* for g_printf */

static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("Stream ends\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR: {
      gchar *debug;
      GError *error;
      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);
      g_printerr ("Error: %s\n", error->message);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}

static void on_pad_added (GstElement *element, GstPad *pad, gpointer data)
{
  GstPad *sinkpad;
  GstElement *decoder = (GstElement *) data;

  /* We can now link this pad with the rtsp-decoder sink pad */
  g_print ("Dynamic pad created, linking source/demuxer\n");
  sinkpad = gst_element_get_static_pad (decoder, "sink");
  gst_pad_link (pad, sinkpad);
  gst_object_unref (sinkpad);
}

int main (int argc, char *argv[])
{
  GMainLoop *loop;
  GstBus *bus;
  GstElement *source;
  GstElement *decoder;
  GstElement *sink;
  GstElement *pipeline;
  GstElement *demux;
  GstElement *colorspace;

  /* Initializing GStreamer */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  //gst-launch-0.10 rtspsrc location=rtsp://<ip> ! decodebin ! ffmpegcolorspace ! autovideosink
  //gst-launch -v rtspsrc location="rtsp://<ip>" ! rtpmp4vdepay ! mpeg4videoparse ! ffdec_mpeg4 ! ffmpegcolorspace ! autovideosink
  //gst-launch -v rtspsrc location="rtsp://<ip>" ! rtpmp4vdepay ! ffdec_mpeg4 ! ffmpegcolorspace ! autovideosink

  /* Create the pipeline's elements */
  pipeline = gst_pipeline_new ("video player");
  g_assert (pipeline);
  source = gst_element_factory_make ("rtspsrc", "source");
  g_assert (source);
  demux = gst_element_factory_make ("rtpmp4vdepay", "depay");
  g_assert (demux);
  decoder = gst_element_factory_make ("ffdec_mpeg4", "decoder");
  g_assert (decoder);
  colorspace = gst_element_factory_make ("ffmpegcolorspace", "colorspace");
  g_assert (colorspace);
  sink = gst_element_factory_make ("autovideosink", "output");
  g_assert (sink);

  /* Make sure every element was created ok */
  if (!pipeline || !source || !demux || !decoder || !colorspace || !sink) {
    g_printerr ("One of the elements wasn't created... Exiting\n");
    return -1;
  }

  g_printf (" \nPipeline is Part(A) ->(dynamic/runtime link) Part(B)[ Part(B-1) -> Part(B-2) -> Part(B-3) ]\n\n");
  g_printf (" [source](dynamic)->(dynamic)[demux]->[decoder]->[colorspace]->[videosink] \n\n");

  /* Set the video source */
  g_object_set (G_OBJECT (source), "location", argv[1], NULL);
  //g_object_set (G_OBJECT (source), "do-rtcp", TRUE, NULL);
  g_object_set (G_OBJECT (source), "latency", 0, NULL);

  /* Putting a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* Add the elements to the bin */
  gst_bin_add_many (GST_BIN (pipeline), source, demux, decoder, colorspace, sink, NULL);

  /* Link confirmation */
  if (!gst_element_link_many (demux, decoder, colorspace, sink, NULL)) {
    g_warning ("Linking part (B) failed...");
  }

  g_printf ("\nNote that the source is linked to the demuxer (depayloader) dynamically.\n"
            "The reason is that rtspsrc may contain various elements (for example\n"
            "audio and video). The source pad(s) will be created at run time,\n"
            "by rtspsrc when it detects the amount and nature of the elements.\n"
            "Therefore we connect a callback function which will be executed\n"
            "when the \"pad-added\" signal is emitted.\n");

  /* Dynamic pad creation */
  if (!g_signal_connect (source, "pad-added", G_CALLBACK (on_pad_added), demux)) {
    g_warning ("Linking part (A) with part (B) failed...");
  }

  /* Run the pipeline */
  g_print ("Playing: %s\n", argv[1]);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_main_loop_run (loop);

  /* Ending playback */
  g_print ("End of the streaming... ending the playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);

  /* Eliminating the pipeline */
  g_print ("Eliminating pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));

  return 0;
}
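For reference, here is a rough sketch of how the same approach could be translated to Python with the pygst 0.10 bindings. It is an untested sketch under the same assumptions as the C listing: the camera URL and the MPEG-4 element names (rtpmp4vdepay, ffdec_mpeg4) are carried over from it, so for the H.264 camera in the question you would swap in rtph264depay and ffdec_h264. The essential difference from the original Python attempt is that only the elements after the depayloader are linked statically; rtspsrc is linked from a "pad-added" handler.

#!/usr/bin/env python
# Sketch: dynamic linking of rtspsrc in pygst 0.10 (element names and URL are placeholders).
import gobject
import pygst
pygst.require("0.10")
import gst

def on_pad_added(source, pad, depay):
    # rtspsrc creates its source pad(s) only at runtime, so link it here.
    sinkpad = depay.get_static_pad("sink")
    if not sinkpad.is_linked():
        pad.link(sinkpad)

def main():
    gobject.threads_init()

    pipeline = gst.Pipeline("player")
    source = gst.element_factory_make("rtspsrc", "source")
    depay = gst.element_factory_make("rtpmp4vdepay", "depay")      # rtph264depay for H.264
    decoder = gst.element_factory_make("ffdec_mpeg4", "decoder")   # ffdec_h264 for H.264
    colorspace = gst.element_factory_make("ffmpegcolorspace", "colorspace")
    sink = gst.element_factory_make("autovideosink", "output")

    source.set_property("location", "rtsp://192.168.0.127/axis-media/media.amp")
    source.set_property("latency", 0)

    pipeline.add(source, depay, decoder, colorspace, sink)
    # Static links only for the part after the depayloader...
    gst.element_link_many(depay, decoder, colorspace, sink)
    # ...and a dynamic link for rtspsrc once its pad appears.
    source.connect("pad-added", on_pad_added, depay)

    pipeline.set_state(gst.STATE_PLAYING)
    loop = gobject.MainLoop()
    try:
        loop.run()
    finally:
        pipeline.set_state(gst.STATE_NULL)

if __name__ == "__main__":
    main()

This mirrors the C program's structure: gst_element_link_many over part (B), plus a pad-added callback for part (A).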
Makefile
TEST = test12
EXT = c
CC = gcc
CPP = g++

gstreamer:
	$(CC) -g $(TEST).$(EXT) -o $(TEST) `pkg-config gstreamer-0.10 --libs --cflags` `pkg-config gtk+-2.0 --libs --cflags`

clean:
	rm -rf $(TEST)
Update
Here is the equivalent Java code:
// Display RTSP streaming of video
// (c) 2011 enthusiasticgeek
// This code is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
// Leave the credits intact

package video2; // replace with your package

import java.awt.BorderLayout;
import java.awt.Dimension;

import javax.swing.JFrame;
import javax.swing.SwingUtilities;

//import org.gstreamer.Caps;
import org.gstreamer.Bus;
import org.gstreamer.Element;
import org.gstreamer.ElementFactory;
import org.gstreamer.Gst;
import org.gstreamer.GstObject;
import org.gstreamer.Pad;
import org.gstreamer.PadDirection;
import org.gstreamer.Pipeline;
import org.gstreamer.State;
import org.gstreamer.TagList;
import org.gstreamer.swing.VideoComponent;

/**
 * A simple videotest example.
 */
public class Main {

    public Main() {
    }

    private static Pipeline pipe;

    public static void main(String[] args) {
        // Quartz is abysmally slow at scaling video for some reason, so turn it off.
        System.setProperty("apple.awt.graphics.UseQuartz", "false");

        args = Gst.init("SwingVideoTest", args);
        pipe = new Pipeline("pipeline");
        /*
        final Element videosrc = ElementFactory.make("videotestsrc", "source");
        final Element videofilter = ElementFactory.make("capsfilter", "flt");
        videofilter.setCaps(Caps.fromString("video/x-raw-yuv, width=720, height=576"
                + ", bpp=32, depth=32, framerate=25/1"));
        */

        pipe.getBus().connect(new Bus.ERROR() {
            public void errorMessage(GstObject source, int code, String message) {
                System.out.println("Error occurred: " + message);
                Gst.quit();
            }
        });
        pipe.getBus().connect(new Bus.STATE_CHANGED() {
            public void stateChanged(GstObject source, State old, State current, State pending) {
                if (source == pipe) {
                    System.out.println("Pipeline state changed from " + old + " to " + current);
                }
            }
        });
        pipe.getBus().connect(new Bus.EOS() {
            public void endOfStream(GstObject source) {
                System.out.println("Finished playing file");
                Gst.quit();
            }
        });
        pipe.getBus().connect(new Bus.TAG() {
            public void tagsFound(GstObject source, TagList tagList) {
                for (String tag : tagList.getTagNames()) {
                    System.out.println("Found tag " + tag + " = "
                            + tagList.getValue(tag, 0));
                }
            }
        });

        final Element source = ElementFactory.make("rtspsrc", "source");
        final Element demux = ElementFactory.make("rtpmp4vdepay", "depay");
        final Element decoder = ElementFactory.make("ffdec_mpeg4", "decoder");
        final Element colorspace = ElementFactory.make("ffmpegcolorspace", "colorspace");
        //final Element sink = ElementFactory.make("autovideosink", "output");

        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                // Create the video component and link it in
                VideoComponent videoComponent = new VideoComponent();
                Element videosink = videoComponent.getElement();

                source.connect(new Element.PAD_ADDED() {
                    public void padAdded(Element element, Pad pad) {
                        pad.link(demux.getStaticPad("sink"));
                    }
                });

                Pad p = new Pad(null, PadDirection.SRC);
                source.addPad(p);

                source.set("location", "rtsp://<user>:<pass>@<ip>/mpeg4/1/media.amp"); // replace with your source
                pipe.addMany(source, demux, decoder, colorspace, videosink);
                Element.linkMany(demux, decoder, colorspace, videosink);

                // Now create a JFrame to display the video output
                JFrame frame = new JFrame("Swing Video Test");
                frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
                frame.add(videoComponent, BorderLayout.CENTER);
                videoComponent.setPreferredSize(new Dimension(720, 576));
                frame.pack();
                frame.setVisible(true);

                // Start the pipeline processing
                pipe.play();
            }
        });
    }
}
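If, like the original question, you want the video rendered inside your own PyGTK window rather than in the window autovideosink opens (the job the Swing VideoComponent does above), the usual pygst 0.10 pattern is to handle the "prepare-xwindow-id" sync message, which the question's snippet is already set up for via bus.connect("sync-message::element", ...). A minimal sketch, assuming a gtk.DrawingArea named movie_window that is already packed into your window:

import gtk
import gst

def on_sync_message(bus, message, movie_window):
    # Hand the X window id of our widget to the video sink as soon as it asks for one.
    if message.structure is None:
        return
    if message.structure.get_name() == "prepare-xwindow-id":
        imagesink = message.src
        imagesink.set_property("force-aspect-ratio", True)
        gtk.gdk.threads_enter()
        imagesink.set_xwindow_id(movie_window.window.xid)
        gtk.gdk.threads_leave()

# Wiring, given the pipeline's bus and your drawing area:
#   bus.enable_sync_message_emission()
#   bus.connect("sync-message::element", on_sync_message, movie_window)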