1 /*M///////////////////////////////////////////////////////////////////////////////////////
3 // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
5 // By downloading, copying, installing or using the software you agree to this license.
6 // If you do not agree to this license, do not download, install,
7 // copy or use the software.
10 // Intel License Agreement
11 // For Open Source Computer Vision Library
13 // Copyright (C) 2008, Nils Hasler, all rights reserved.
14 // Third party copyrights are property of their respective owners.
16 // Redistribution and use in source and binary forms, with or without modification,
17 // are permitted provided that the following conditions are met:
19 // * Redistribution's of source code must retain the above copyright notice,
20 // this list of conditions and the following disclaimer.
22 // * Redistribution's in binary form must reproduce the above copyright notice,
23 // this list of conditions and the following disclaimer in the documentation
24 // and/or other materials provided with the distribution.
26 // * The name of Intel Corporation may not be used to endorse or promote products
27 // derived from this software without specific prior written permission.
29 // This software is provided by the copyright holders and contributors "as is" and
30 // any express or implied warranties, including, but not limited to, the implied
31 // warranties of merchantability and fitness for a particular purpose are disclaimed.
32 // In no event shall the Intel Corporation or contributors be liable for any direct,
33 // indirect, incidental, special, exemplary, or consequential damages
34 // (including, but not limited to, procurement of substitute goods or services;
35 // loss of use, data, or profits; or business interruption) however caused
36 // and on any theory of liability, whether in contract, strict liability,
37 // or tort (including negligence or otherwise) arising in any way out of
38 // the use of this software, even if advised of the possibility of such damage.
42 // Author: Nils Hasler <hasler@mpi-inf.mpg.de>
44 // Max-Planck-Institut Informatik
46 // this implementation was inspired by gnash's gstreamer interface
49 // use GStreamer to read a video
56 #include "gstappsink.h"
// NOTE(review): CV_WARN is defined twice below; the conditional compilation
// directives (likely an NDEBUG #if/#else) are elided from this view — confirm
// in the full file before assuming a redefinition bug.
59 #define CV_WARN(message)
// Debug variant: prints the warning text with source file and line to stderr.
61 #define CV_WARN(message) fprintf(stderr, "warning: %s (%s:%d)\n", message, __FILE__, __LINE__)
// One-shot GStreamer initialisation flag; gst_init() is invoked in
// icvCreateCapture_GStreamer (the guard test itself is not visible here).
64 static bool isInited = false;
// Per-capture state for the GStreamer backend.  Additional members used
// throughout this file (pipeline, source, colour, appsink, buffer, frame)
// are declared on lines elided from this view.
66 typedef struct CvCapture_GStreamer
69 int type; // one of [1394, v4l2, v4l, file]
// Decoder element; dynamically creates source pads once the stream type is known.
73 GstElement *decodebin;
79 GstCaps *caps; // filter caps inserted right after the source
82 } CvCapture_GStreamer;
// Tears down a capture: stops the pipeline, then releases the pipeline
// object, any pending GstBuffer, the cached IplImage frame, and the filter caps.
// NOTE(review): NULL-guards (e.g. if(cap->pipeline)) around these calls are
// elided from this view — do not assume they are missing in the full file.
84 static void icvClose_GStreamer(CvCapture_GStreamer *cap)
87 gst_element_set_state(GST_ELEMENT(cap->pipeline), GST_STATE_NULL);
88 gst_object_unref(GST_OBJECT(cap->pipeline));
92 gst_buffer_unref(cap->buffer);
95 cvReleaseImage(&cap->frame);
98 gst_caps_unref(cap->caps);
// Drains all pending messages from the pipeline's bus and reacts to them:
// state changes are parsed (trace printf left commented out), errors are
// reported on stderr and halt the pipeline, EOS is acknowledged.  Every
// popped message is unreffed and the bus reference is released on exit.
// NOTE(review): case 'break's and the err/debug declarations for the error
// branch are elided from this view — confirm fall-through behaviour in the
// full file rather than assuming it.
101 static void icvHandleMessage(CvCapture_GStreamer *cap)
103 GstBus* bus = gst_element_get_bus(cap->pipeline);
// Non-blocking drain: only handle messages already queued.
105 while(gst_bus_have_pending(bus)) {
106 GstMessage* msg = gst_bus_pop(bus);
108 // printf("Got %s message\n", GST_MESSAGE_TYPE_NAME(msg));
110 switch (GST_MESSAGE_TYPE (msg)) {
111 case GST_MESSAGE_STATE_CHANGED:
112 GstState oldstate, newstate, pendstate;
113 gst_message_parse_state_changed(msg, &oldstate, &newstate, &pendstate);
114 // printf("state changed from %d to %d (%d)\n", oldstate, newstate, pendstate);
116 case GST_MESSAGE_ERROR: {
119 gst_message_parse_error(msg, &err, &debug);
// Report which element failed and why, then stop the pipeline entirely.
121 fprintf(stderr, "GStreamer Plugin: Embedded video playback halted; module %s reported: %s\n",
122 gst_element_get_name(GST_MESSAGE_SRC (msg)), err->message);
127 gst_element_set_state(cap->pipeline, GST_STATE_NULL);
131 case GST_MESSAGE_EOS:
132 // CV_WARN("NetStream has reached the end of the stream.");
136 // CV_WARN("unhandled message\n");
// Each popped message carries a reference that must be dropped here.
140 gst_message_unref(msg);
143 gst_object_unref(GST_OBJECT(bus));
147 // start the pipeline, grab a buffer, and pause again
// Grabs one buffer from the appsink.  Strategy visible in this view:
// bail out on EOS; release any previously held buffer; if the appsink queue
// is empty, briefly set the pipeline PLAYING, pull a buffer, then PAUSE
// again; otherwise peek the next already-queued buffer.  The grabbed buffer
// is stashed in cap->buffer for icvRetrieveFrame_GStreamer to convert.
// NOTE(review): the return statements and several guards are elided from
// this view — the success/failure return values must be confirmed in the
// full file.
149 static int icvGrabFrame_GStreamer(CvCapture_GStreamer *cap)
154 if(gst_app_sink_is_eos(GST_APP_SINK(cap->appsink))) {
155 //printf("end of stream\n");
// Drop the buffer from the previous grab before fetching a new one.
160 gst_buffer_unref(cap->buffer);
162 icvHandleMessage(cap);
164 if(!gst_app_sink_get_queue_length(GST_APP_SINK(cap->appsink))) {
165 // printf("no buffers queued, starting pipeline\n");
// Start the pipeline so the sink receives data.
167 if(gst_element_set_state(GST_ELEMENT(cap->pipeline), GST_STATE_PLAYING) ==
168 GST_STATE_CHANGE_FAILURE) {
169 icvHandleMessage(cap);
173 // icvHandleMessage(cap);
175 // // check whether stream contains an acceptable video stream
176 // GstPad *sinkpad = gst_element_get_pad(cap->colour, "sink");
177 // if(!GST_PAD_IS_LINKED(sinkpad)) {
178 // gst_object_unref(sinkpad);
179 // fprintf(stderr, "GStreamer: Pipeline is NOT ready. Format unknown?\n");
182 // gst_object_unref(sinkpad);
184 // printf("pulling preroll\n");
186 // if(!gst_app_sink_pull_preroll(GST_APP_SINK(cap->appsink))) {
187 // printf("no preroll\n");
191 // printf("pulling buffer\n");
// Blocking pull: waits for the next buffer from the running pipeline.
193 cap->buffer = gst_app_sink_pull_buffer(GST_APP_SINK(cap->appsink));
195 // printf("pausing pipeline\n");
// Pause again so we only decode on demand (one buffer per grab).
197 if(gst_element_set_state(GST_ELEMENT(cap->pipeline), GST_STATE_PAUSED) ==
198 GST_STATE_CHANGE_FAILURE) {
199 icvHandleMessage(cap);
203 // printf("pipeline paused\n");
205 // printf("peeking buffer, %d buffers in queue\n",
206 // gst_app_sink_get_queue_length(GST_APP_SINK(cap->appsink)));
// Queue already has data: take the next buffer without restarting playback.
207 cap->buffer = gst_app_sink_peek_buffer(GST_APP_SINK(cap->appsink));
213 // printf("pulled buffer %p\n", cap->buffer);
// Converts the buffer grabbed by icvGrabFrame_GStreamer into a 3-channel
// 8-bit IplImage.  Reads bpp/endianness/RGB masks and width/height from the
// buffer caps, computes per-channel shifts from the masks, then unpacks each
// pixel into the frame's data.  Returns the cached cap->frame.
// NOTE(review): several returns and guards (including a likely
// "create frame only once" guard before cvCreateImage) are elided from this
// view — confirm in the full file.
221 static IplImage *icvRetrieveFrame_GStreamer(CvCapture_GStreamer *cap, int)
226 // printf("getting buffercaps\n")
228 GstCaps* caps = gst_buffer_get_caps(cap->buffer);
230 assert(gst_caps_get_size(caps) == 1);
232 GstStructure* structure = gst_caps_get_structure(caps, 0);
234 gint bpp, endianness, redmask, greenmask, bluemask;
// All five fields are required to interpret the raw RGB data.
236 if(!gst_structure_get_int(structure, "bpp", &bpp) ||
237 !gst_structure_get_int(structure, "endianness", &endianness) ||
238 !gst_structure_get_int(structure, "red_mask", &redmask) ||
239 !gst_structure_get_int(structure, "green_mask", &greenmask) ||
240 !gst_structure_get_int(structure, "blue_mask", &bluemask)) {
// NOTE(review): gst_caps_to_string returns an allocated string that is not
// freed here — confirm/leak-check in the full file.
241 printf("missing essential information in buffer caps, %s\n", gst_caps_to_string(caps));
245 //printf("buffer has %d bpp, endianness %d, rgb %x %x %x, %s\n", bpp, endianness, redmask, greenmask, bluemask, gst_caps_to_string(caps));
247 if(!redmask || !greenmask || !bluemask)
253 if(!gst_structure_get_int(structure, "width", &width) ||
254 !gst_structure_get_int(structure, "height", &height))
257 // printf("creating frame %dx%d\n", width, height);
259 cap->frame = cvCreateImage(cvSize(width, height), IPL_DEPTH_8U, 3);
262 gst_caps_unref(caps);
264 unsigned char *data = GST_BUFFER_DATA(cap->buffer);
266 //printf("generating shifts\n")
268 IplImage *frame = cap->frame;
// Bytes per source pixel, derived from bits-per-pixel.
269 unsigned nbyte = bpp >> 3;
270 unsigned redshift, blueshift, greenshift;
271 unsigned mask = redmask;
// Count trailing zero bits of each mask to find the channel's bit offset.
272 for(redshift = 0, mask = redmask; (mask & 1) == 0; mask >>= 1, redshift++)
274 for(greenshift = 0, mask = greenmask; (mask & 1) == 0; mask >>= 1, greenshift++)
276 for(blueshift = 0, mask = bluemask; (mask & 1) == 0; mask >>= 1, blueshift++)
279 //printf("shifts: %u %u %u\n", redshift, greenshift, blueshift);
// Unpack each source pixel; output channel order is R,G,B as written.
// NOTE(review): reading *(gint*) from 'data' dereferences 4 bytes even when
// nbyte < 4, and 'data' advances by nbyte with no per-row padding/stride
// handling — presumed acceptable for the packed formats negotiated here,
// but worth confirming.
281 for(int r = 0; r < frame->height; r++) {
282 for(int c = 0; c < frame->width; c++, data += nbyte) {
283 int at = r * frame->widthStep + c * 3;
284 frame->imageData[at] = ((*((gint *)data)) & redmask) >> redshift;
285 frame->imageData[at+1] = ((*((gint *)data)) & greenmask) >> greenshift;
286 frame->imageData[at+2] = ((*((gint *)data)) & bluemask) >> blueshift;
290 // printf("converted buffer\n")
// The grabbed buffer is consumed once converted.
292 gst_buffer_unref(cap->buffer);
// Queries a capture property (CV_CAP_PROP_*) from the pipeline.
// Position is queried in TIME / DEFAULT (frames) / PERCENT format depending
// on the requested id; frame count uses a duration query.  Unhandled ids
// produce a warning.  NOTE(review): most 'return' statements and the
// declarations of 'format'/'value' are elided from this view — the exact
// returned values for several branches must be confirmed in the full file.
298 static double icvGetProperty_GStreamer(CvCapture_GStreamer *cap, int id)
305 CV_WARN("GStreamer: no pipeline");
310 case CV_CAP_PROP_POS_MSEC:
311 format = GST_FORMAT_TIME;
312 if(!gst_element_query_position(cap->pipeline, &format, &value)) {
313 CV_WARN("GStreamer: unable to query position of stream");
316 return value * 1e-6; // nano seconds to milli seconds
317 case CV_CAP_PROP_POS_FRAMES:
// DEFAULT format counts frames for video streams.
318 format = GST_FORMAT_DEFAULT;
319 if(!gst_element_query_position(cap->pipeline, &format, &value)) {
320 CV_WARN("GStreamer: unable to query position of stream");
324 case CV_CAP_PROP_POS_AVI_RATIO:
325 format = GST_FORMAT_PERCENT;
326 if(!gst_element_query_position(cap->pipeline, &format, &value)) {
327 CV_WARN("GStreamer: unable to query position of stream");
330 // printf("value %llu %llu %g\n", value, GST_FORMAT_PERCENT_MAX, ((double) value) / GST_FORMAT_PERCENT_MAX);
// Normalise the percent-scaled position into [0, 1].
331 return ((double) value) / GST_FORMAT_PERCENT_MAX;
332 case CV_CAP_PROP_FRAME_WIDTH:
333 case CV_CAP_PROP_FRAME_HEIGHT:
334 case CV_CAP_PROP_FPS:
335 case CV_CAP_PROP_FOURCC:
337 case CV_CAP_PROP_FRAME_COUNT:
338 format = GST_FORMAT_DEFAULT;
339 if(!gst_element_query_duration(cap->pipeline, &format, &value)) {
340 CV_WARN("GStreamer: unable to query position of stream");
344 case CV_CAP_PROP_FORMAT:
345 case CV_CAP_PROP_MODE:
346 case CV_CAP_PROP_BRIGHTNESS:
347 case CV_CAP_PROP_CONTRAST:
348 case CV_CAP_PROP_SATURATION:
349 case CV_CAP_PROP_HUE:
350 case CV_CAP_PROP_GAIN:
351 case CV_CAP_PROP_CONVERT_RGB:
354 CV_WARN("GStreamer: unhandled property");
// Re-links source -> decodebin through the current cap->caps filter.
// The pipeline is dropped to READY, the old link removed, a filtered link
// established, and the pipeline returned to PAUSED.  Uses the OpenCV
// CV_FUNCNAME/CV_ERROR error-handling convention.
360 static void icvRestartPipeline(CvCapture_GStreamer *cap)
362 CV_FUNCNAME("icvRestartPipeline");
366 printf("restarting pipeline, going to ready\n");
368 if(gst_element_set_state(GST_ELEMENT(cap->pipeline), GST_STATE_READY) ==
369 GST_STATE_CHANGE_FAILURE) {
370 CV_ERROR(CV_StsError, "GStreamer: unable to start pipeline\n");
374 printf("ready, relinking\n");
376 gst_element_unlink(cap->source, cap->decodebin);
// NOTE(review): gst_caps_to_string returns an allocated string that is not
// freed here — possible small leak on this debug path; confirm in full file.
377 printf("filtering with %s\n", gst_caps_to_string(cap->caps));
378 gst_element_link_filtered(cap->source, cap->decodebin, cap->caps);
380 printf("relinked, pausing\n");
382 if(gst_element_set_state(GST_ELEMENT(cap->pipeline), GST_STATE_PAUSED) ==
383 GST_STATE_CHANGE_FAILURE) {
384 CV_ERROR(CV_StsError, "GStreamer: unable to start pipeline\n");
388 printf("state now paused\n");
// Sets a field (width/height/framerate) on the source filter caps and
// restarts the pipeline so the new constraint takes effect.  G_TYPE_INT
// fields take one value (v1); fraction-style fields take two (v1/v2).
// NOTE(review): the if/else structure here is partially elided; also,
// passing "video/x-raw-rgb" as the first varargs field name to
// gst_caps_set_simple looks suspicious (that API expects field names, not a
// media type) — verify against the GStreamer 0.10 caps API.
393 static void icvSetFilter(CvCapture_GStreamer *cap, const char *property, int type, int v1, int v2)
395 printf("setting cap %p %s %d %d %d\n", cap->caps, property, type, v1, v2);
// No caps yet: create a fresh filter with just this field.
398 if(type == G_TYPE_INT)
399 cap->caps = gst_caps_new_simple("video/x-raw-rgb", property, type, v1, NULL);
401 cap->caps = gst_caps_new_simple("video/x-raw-rgb", property, type, v1, v2, NULL);
403 printf("caps before setting %s\n", gst_caps_to_string(cap->caps));
// Existing caps: update the field in place.
404 if(type == G_TYPE_INT)
405 gst_caps_set_simple(cap->caps, "video/x-raw-rgb", property, type, v1, NULL);
407 gst_caps_set_simple(cap->caps, "video/x-raw-rgb", property, type, v1, v2, NULL);
410 icvRestartPipeline(cap);
// Removes a single named field from the filter caps and restarts the
// pipeline.  NOTE(review): a guard for cap->caps == NULL is likely elided
// from this view — confirm before assuming it is absent.
413 static void icvRemoveFilter(CvCapture_GStreamer *cap, const char *filter)
418 GstStructure *s = gst_caps_get_structure(cap->caps, 0);
419 gst_structure_remove_field(s, filter);
421 icvRestartPipeline(cap);
// Sets a capture property.  Seek-style properties (POS_MSEC/POS_FRAMES/
// POS_AVI_RATIO) issue a flushing, accurate gst_element_seek_simple in the
// appropriate format; frame size and FPS install or remove caps filters via
// icvSetFilter/icvRemoveFilter.  Unhandled ids warn.  NOTE(review): the
// 'break' statements, return values, and the value>0 vs value<=0 branch
// structure for the filter cases are elided from this view.
424 static int icvSetProperty_GStreamer(CvCapture_GStreamer *cap, int id, double value)
430 CV_WARN("GStreamer: no pipeline");
435 case CV_CAP_PROP_POS_MSEC:
436 format = GST_FORMAT_TIME;
437 flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
// value is in milliseconds; GST_MSECOND converts to nanoseconds.
438 if(!gst_element_seek_simple(GST_ELEMENT(cap->pipeline), format,
439 flags, (gint64) (value * GST_MSECOND))) {
440 CV_WARN("GStreamer: unable to seek");
443 case CV_CAP_PROP_POS_FRAMES:
444 format = GST_FORMAT_DEFAULT;
445 flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
446 if(!gst_element_seek_simple(GST_ELEMENT(cap->pipeline), format,
447 flags, (gint64) value)) {
448 CV_WARN("GStreamer: unable to seek");
451 case CV_CAP_PROP_POS_AVI_RATIO:
452 format = GST_FORMAT_PERCENT;
453 flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
// value in [0,1] is scaled to GStreamer's percent range.
454 if(!gst_element_seek_simple(GST_ELEMENT(cap->pipeline), format,
455 flags, (gint64) (value * GST_FORMAT_PERCENT_MAX))) {
456 CV_WARN("GStreamer: unable to seek");
459 case CV_CAP_PROP_FRAME_WIDTH:
461 icvSetFilter(cap, "width", G_TYPE_INT, (int) value, 0);
463 icvRemoveFilter(cap, "width");
465 case CV_CAP_PROP_FRAME_HEIGHT:
467 icvSetFilter(cap, "height", G_TYPE_INT, (int) value, 0);
469 icvRemoveFilter(cap, "height");
471 case CV_CAP_PROP_FPS:
475 if(value != num) { // FIXME this supports only fractions x/1 and x/2
476 num = (int) (value * 2);
481 icvSetFilter(cap, "framerate", GST_TYPE_FRACTION, num, denom);
483 icvRemoveFilter(cap, "framerate");
485 case CV_CAP_PROP_FOURCC:
486 case CV_CAP_PROP_FRAME_COUNT:
487 case CV_CAP_PROP_FORMAT:
488 case CV_CAP_PROP_MODE:
489 case CV_CAP_PROP_BRIGHTNESS:
490 case CV_CAP_PROP_CONTRAST:
491 case CV_CAP_PROP_SATURATION:
492 case CV_CAP_PROP_HUE:
493 case CV_CAP_PROP_GAIN:
494 case CV_CAP_PROP_CONVERT_RGB:
497 CV_WARN("GStreamer: unhandled property");
503 // connect decodebin's dynamically created source pads to colourconverter
// "new-decoded-pad" callback: connects decodebin's dynamically created
// source pads to the colourspace converter passed as user data.  Skips the
// connection if the sink is already linked or the pad's caps are not a
// video stream; otherwise links pad -> sinkpad.  caps/sinkpad references
// are released on every visible path.
505 static void icvNewPad(GstElement *decodebin, GstPad *pad, gboolean last, gpointer data)
507 GstElement *sink = GST_ELEMENT(data);
513 sinkpad = gst_element_get_pad(sink, "sink");
515 if(GST_PAD_IS_LINKED(sinkpad)) {
516 g_print("sink is already linked\n");
517 g_object_unref(sinkpad);
521 /* check media type */
522 caps = gst_pad_get_caps(pad);
523 str = gst_caps_get_structure(caps, 0);
524 const char *structname = gst_structure_get_name(str);
525 // g_print("new pad %s\n", structname);
// Only video pads are linked; audio/subtitle pads are ignored.
526 if(!g_strrstr(structname, "video")) {
527 gst_caps_unref(caps);
528 gst_object_unref(sinkpad);
531 printf("linking pad %s\n", structname);
534 gst_pad_link (pad, sinkpad);
536 gst_caps_unref(caps);
537 gst_object_unref(sinkpad);
// Builds a complete capture pipeline for the requested source type
// (dv1394src / v4lsrc / v4l2src / filesrc):
//   source -> decodebin -> ffmpegcolorspace -> opencv-appsink
// decodebin's pads are linked lazily via icvNewPad; the appsink is
// constrained to video/x-raw-rgb and runs unsynchronised.  On success a
// zero-initialised CvCapture_GStreamer is allocated and populated.
// NOTE(review): the isInited guard around gst_init, several 'return 0'
// paths, and the switch/brace structure are elided from this view.
540 static CvCapture_GStreamer * icvCreateCapture_GStreamer(int type, const char *filename)
542 CvCapture_GStreamer *capture = 0;
543 CV_FUNCNAME("cvCaptureFromCAM_GStreamer");
547 // teststreamer(filename);
552 // printf("gst_init\n")
// Initialise GStreamer (no command-line arguments to parse).
553 gst_init (NULL, NULL);
555 // according to the documentation this is the way to register a plugin now
556 // unfortunately, it has not propagated into my distribution yet...
557 // gst_plugin_register_static(GST_VERSION_MAJOR, GST_VERSION_MINOR,
558 // "opencv-appsink", "Element application sink",
559 // "0.1", appsink_plugin_init, "LGPL", "highgui", "opencv",
560 // "http://opencvlibrary.sourceforge.net/");
// Element factory names indexed by capture type (1394, v4l, v4l2, file).
565 const char *sourcetypes[] = {"dv1394src", "v4lsrc", "v4l2src", "filesrc"};
566 //printf("entered capturecreator %s\n", sourcetypes[type]);
569 GstElement *source = gst_element_factory_make(sourcetypes[type], NULL);
// File sources need the path; device sources use their defaults.
573 if(type == CV_CAP_GSTREAMER_FILE)
574 g_object_set(G_OBJECT(source), "location", filename, NULL);
576 GstElement *colour = gst_element_factory_make("ffmpegcolorspace", NULL);
// Custom appsink element (see gstappsink.h) that buffers decoded frames.
578 GstElement *sink = gst_element_factory_make("opencv-appsink", NULL);
579 GstCaps *caps = gst_caps_new_simple("video/x-raw-rgb", NULL);
580 gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
581 // gst_caps_unref(caps);
// Don't sync to the clock — deliver frames as fast as they are requested.
582 gst_base_sink_set_sync(GST_BASE_SINK(sink), false);
583 // g_signal_connect(sink, "new-buffer", G_CALLBACK(newbuffer), NULL);
585 GstElement *decodebin = gst_element_factory_make("decodebin", NULL);
// decodebin creates pads only after typefinding; link them to 'colour' then.
586 g_signal_connect(decodebin, "new-decoded-pad", G_CALLBACK(icvNewPad), colour);
588 GstElement *pipeline = gst_pipeline_new (NULL);
590 gst_bin_add_many(GST_BIN(pipeline), source, decodebin, colour, sink, NULL);
592 // printf("added many\n")
595 case CV_CAP_GSTREAMER_V4L2: // default to 640x480, 30 fps
596 caps = gst_caps_new_simple("video/x-raw-rgb",
597 "width", G_TYPE_INT, 640,
598 "height", G_TYPE_INT, 480,
599 "framerate", GST_TYPE_FRACTION, 30, 1,
601 if(!gst_element_link_filtered(source, decodebin, caps)) {
602 CV_ERROR(CV_StsError, "GStreamer: cannot link v4l2src -> decodebin\n");
603 gst_object_unref(pipeline);
606 gst_caps_unref(caps);
608 case CV_CAP_GSTREAMER_V4L:
609 case CV_CAP_GSTREAMER_1394:
610 case CV_CAP_GSTREAMER_FILE:
// Non-v4l2 sources link without an initial caps filter.
611 if(!gst_element_link(source, decodebin)) {
612 CV_ERROR(CV_StsError, "GStreamer: cannot link filesrc -> decodebin\n");
613 gst_object_unref(pipeline);
619 if(!gst_element_link(colour, sink)) {
620 CV_ERROR(CV_StsError, "GStreamer: cannot link colour -> sink\n");
621 gst_object_unref(pipeline);
625 // printf("linked, pausing\n")
// Step the pipeline READY then PAUSED so it prerolls without playing.
627 if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_READY) ==
628 GST_STATE_CHANGE_FAILURE) {
629 CV_WARN("GStreamer: unable to set pipeline to paused\n");
630 // icvHandleMessage(capture);
631 // cvReleaseCapture((CvCapture **)(void *)&capture);
632 gst_object_unref(pipeline);
637 if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PAUSED) ==
638 GST_STATE_CHANGE_FAILURE) {
639 CV_WARN("GStreamer: unable to set pipeline to paused\n");
640 // icvHandleMessage(capture);
641 // cvReleaseCapture((CvCapture **)(void *)&capture);
642 gst_object_unref(pipeline);
647 // printf("state now paused\n")
649 // construct capture struct
650 capture = (CvCapture_GStreamer *)cvAlloc(sizeof(CvCapture_GStreamer));
651 memset(capture, 0, sizeof(CvCapture_GStreamer));
652 capture->type = type;
653 capture->pipeline = pipeline;
654 capture->source = source;
655 capture->decodebin = decodebin;
656 capture->colour = colour;
657 capture->appsink = sink;
// Consume any startup messages (state changes, early errors).
659 icvHandleMessage(capture);
661 OPENCV_ASSERT(capture,
662 "cvCaptureFromFile_GStreamer( const char * )", "couldn't create capture");
664 // GstClock *clock = gst_pipeline_get_clock(GST_PIPELINE(pipeline));
665 // printf("clock %s\n", gst_object_get_name(GST_OBJECT(clock)));
675 // image sequence writer
// State for the image-sequence "video writer": writes numbered image files
// instead of an encoded stream.  A filename-pattern member used below is
// declared on lines elided from this view.
678 typedef struct CvVideoWriter_GStreamer {
// Index substituted into the filename pattern for the next frame.
680 unsigned currentframe;
// Writes one frame as an image file: expands the stored printf-style
// filename pattern with the current frame number (growing the buffer until
// snprintf no longer truncates), saves via cvSaveImage, and returns its
// result.  NOTE(review): the declarations of 'x'/'size', the loop body, the
// frame-counter increment, and free(x) are elided from this view.
683 static int icvWriteFrame_GStreamer( CvVideoWriter* writer, const IplImage* image )
685 CvVideoWriter_GStreamer *wri = (CvVideoWriter_GStreamer *)writer;
// Grow the name buffer until the formatted name fits.
690 while(snprintf(x, size, wri->filename, wri->currentframe) == size - 1) {
693 x = (char *)malloc(size);
695 x = (char *)realloc(x, size);
698 int ret = cvSaveImage(x, image);
// Releases a writer: frees the strdup'ed filename pattern.  NOTE(review):
// the guard against a NULL *wri and the cvFree of the struct itself are
// elided from this view.
708 static void icvReleaseVideoWriter_GStreamer( CvVideoWriter** writer )
710 CvVideoWriter_GStreamer **wri = (CvVideoWriter_GStreamer **)writer;
712 free((*wri)->filename);
// Creates the image-sequence writer.  Extracts a printf-style pattern (and
// starting offset) from the filename, formats a probe name to check that an
// image writer exists for the extension, then allocates and initialises the
// writer struct.  NOTE(review): sizeof(CvCapture_GStreamer) is used for the
// allocation of a CvVideoWriter_GStreamer — likely a copy/paste slip that
// only works because the capture struct is larger; worth fixing upstream.
// Several declarations and failure paths are elided from this view.
715 CvVideoWriter* cvCreateVideoWriter_GStreamer( const char* filename )
717 CvVideoWriter_GStreamer *writer;
720 char *name = icvExtractPattern(filename, &offset);
// Probe expansion with frame 0, growing the buffer until it fits.
727 while(snprintf(x, size, name, 0) == size - 1) {
730 x = (char *)malloc(size);
732 x = (char *)realloc(x, size);
// Reject patterns whose extension no image writer supports.
734 if(!cvHaveImageWriter(x)) {
742 writer = (CvVideoWriter_GStreamer *)cvAlloc(sizeof(CvCapture_GStreamer));
743 memset(writer, 0, sizeof(CvVideoWriter_GStreamer));
744 writer->filename = strdup(name);
745 writer->currentframe = offset;
747 return (CvVideoWriter *)writer;
// C++ adapter exposing the C-style GStreamer capture functions through the
// CvCapture virtual interface.  Owns a heap-allocated CvCapture_GStreamer
// (captureGS) created in open() and released in close().
752 class CvCapture_GStreamer_CPP : public CvCapture
755 CvCapture_GStreamer_CPP() { captureGS = 0; }
756 virtual ~CvCapture_GStreamer_CPP() { close(); }
758 virtual bool open( int type, const char* filename );
759 virtual void close();
761 virtual double getProperty(int);
762 virtual bool setProperty(int, double);
763 virtual bool grabFrame();
764 virtual IplImage* retrieveFrame(int);
// Underlying C capture state; 0 when closed.
767 CvCapture_GStreamer* captureGS;
// Creates the underlying capture; returns true on success.  NOTE(review):
// a preceding close() call is likely elided from this view.
770 bool CvCapture_GStreamer_CPP::open( int type, const char* filename )
773 captureGS = icvCreateCapture_GStreamer( type, filename );
774 return captureGS != 0;
// Releases the underlying capture and frees the struct.  NOTE(review): the
// if(captureGS) guard is elided from this view — confirm in the full file.
777 void CvCapture_GStreamer_CPP::close()
781 icvClose_GStreamer( captureGS );
782 cvFree( &captureGS );
// Forwards to icvGrabFrame_GStreamer; false when no capture is open.
786 bool CvCapture_GStreamer_CPP::grabFrame()
788 return captureGS ? icvGrabFrame_GStreamer( captureGS ) != 0 : false;
// Forwards to icvRetrieveFrame_GStreamer; 0 when no capture is open.
791 IplImage* CvCapture_GStreamer_CPP::retrieveFrame(int)
793 return captureGS ? (IplImage*)icvRetrieveFrame_GStreamer( captureGS, 0 ) : 0;
// Forwards to icvGetProperty_GStreamer; 0 when no capture is open.
796 double CvCapture_GStreamer_CPP::getProperty( int propId )
798 return captureGS ? icvGetProperty_GStreamer( captureGS, propId ) : 0;
// Forwards to icvSetProperty_GStreamer; false when no capture is open.
801 bool CvCapture_GStreamer_CPP::setProperty( int propId, double value )
803 return captureGS ? icvSetProperty_GStreamer( captureGS, propId, value ) != 0 : false;
806 CvCapture* cvCreateCapture_GStreamer( int type, const char* filename )
808 CvCapture_GStreamer_CPP* capture = new CvCapture_GStreamer_CPP;
810 if( capture->open( type, filename ))