libmoldeo (Moldeo 1.0 Core)  1.0
libmoldeo is the set of classes and functions that implements the core operations of the Moldeo 1.0 Platform.
moGsGraph.cpp
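This file implements moGsGraph, the GStreamer-based capture/playback graph of libmoldeo, and moGsFramework, which enumerates capture devices. As a quick orientation before the listing, the sketch below shows one plausible way a caller could drive the classes defined here. It is only an illustrative sketch built from methods visible in this file (InitGraph, BuildLiveGraph, CopyVideoFrame, IsEOS, FinishGraph, LoadCaptureDevices) and it assumes the declarations in moGsGraph.h; the frame-buffer size, device selection and function name RunLivePreview are placeholders, not values taken from this code.

  // Hypothetical driver, not part of moGsGraph.cpp.
  #include "moGsGraph.h"

  void RunLivePreview( moBucketsPool &BucketsPool ) {
    moGsFramework Framework;
    moCaptureDevices* pDevices = Framework.LoadCaptureDevices();   // enumerate cameras
    if ( !pDevices || pDevices->Count()==0 ) return;

    moGsGraph Graph;
    if ( !Graph.InitGraph() ) return;                    // creates the pipeline and the bus watch

    moCaptureDevice Device = (*pDevices)[0];             // first detected device (placeholder choice)
    if ( Graph.BuildLiveGraph( &BucketsPool, Device ) ) {// forwards to BuildLiveWebcamGraph
      unsigned char frame[640*480*4];                    // placeholder RGBA frame buffer
      while ( !Graph.IsEOS() ) {
        Graph.CopyVideoFrame( frame, sizeof(frame) );    // copies the latest bucket, if any
        // ... hand "frame" to a texture upload or preview here;
        // a real caller would pace this loop with its render cycle ...
      }
    }
    Graph.FinishGraph();                                 // releases elements and the pipeline
  }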
1 /*******************************************************************************
2 
3  moGsGraph.cpp
4 
5  ****************************************************************************
6  * *
7  * This source is free software; you can redistribute it and/or modify *
8  * it under the terms of the GNU General Public License as published by *
9  * the Free Software Foundation; either version 2 of the License, or *
10  * (at your option) any later version. *
11  * *
12  * This code is distributed in the hope that it will be useful, but *
13  * WITHOUT ANY WARRANTY; without even the implied warranty of *
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU *
15  * General Public License for more details. *
16  * *
17  * A copy of the GNU General Public License is available on the World *
18  * Wide Web at <http://www.gnu.org/copyleft/gpl.html>. You can also *
19  * obtain it by writing to the Free Software Foundation, *
20  * Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
21  * *
22  ****************************************************************************
23 
24  Copyright(C) 2006 Fabricio Costa
25 
26  Authors:
27  Fabricio Costa
28 
29  Gstreamer list of defined types:
30  http://gstreamer.freedesktop.org/data/doc/gstreamer/head/pwg/html/section-types-definitions.html
31 
32 *******************************************************************************/
33 #include "moGsGraph.h"
34 
35 #include <gst/gst.h>
36 
37 #ifndef GSTVERSION
38 #include <gst/interfaces/propertyprobe.h>
39 #else
40 #endif // GSTVERSION
41 //#define GSTVERSION
42 #include "moFileManager.h"
43 
45 #ifndef MO_GSTREAMER
46 #define MO_GSTREAMER
47 #endif
48 
49 #ifdef MO_GSTREAMER
50 
51 #ifdef GSTVERSION
52  #include <gst/app/gstappsink.h>
53  #define DECODEBIN "decodebin"
54  #define VIDEOCONVERT "videoconvert"
55 #else
56  #define VIDEOCONVERT "ffmpegcolorspace"
57  #ifdef MO_MACOSX
58  #define DECODEBIN "decodebin"
59  #else
60  #define DECODEBIN "decodebin2"
61  #endif
62 #endif
63 
64 static gboolean bus_call ( GstBus *bus, GstMessage *msg, void* user_data)
65 {
66  //cout << "bus_call: new message" << endl;
67  bus = NULL;
68  moGsGraph* pGsGraph = (moGsGraph*) user_data;
69 
70  if (true) {
71  const GstStructure *s;
72 
73  s = gst_message_get_structure ((GstMessage *)msg);
74 
75  /*
76  pGsGraph->MODebug2->Message(
77  moText("moGsGraph:: Got message from element \"")
78  + moText( GST_STR_NULL (GST_ELEMENT_NAME (GST_MESSAGE_SRC (msg))) )
79  + moText("\" (")
80  + moText(gst_message_type_get_name (GST_MESSAGE_TYPE (msg)))
81  + moText(")") );
82 */
83  if (s) {
84  gchar *sstr;
85 
86  sstr = gst_structure_to_string (s);
87  //pGsGraph->MODebug2->Message( moText(sstr) );
88  //g_print ("%s\n", sstr);
89  g_free (sstr);
90  } else {
91 
92  //pGsGraph->MODebug2->Message( moText(" <no message details>") );
93  //g_print ("no message details\n");
94  }
95  }
96 
97  switch (GST_MESSAGE_TYPE (msg))
98  {
99  case GST_MESSAGE_EOS:
100  {
101  //g_message ("End-of-stream");
102  pGsGraph->MODebug2->Message(moText("moGsGraph:: EOS <End-of-stream> "));
103  pGsGraph->SetEOS(true);
104  //g_main_loop_quit (loop);
105  break;
106  }
107 
108  case GST_MESSAGE_ERROR:
109  {
110  gchar *debug;
111 
112  GError *err;
113 
114  gst_message_parse_error ((GstMessage *)msg, &err, &debug);
115  pGsGraph->MODebug2->Error(moText("moGsGraph:: gst message error: ") + moText(debug));
116  g_free (debug);
117  //g_error ("%s", err->message);
118  //g_error_free (err);
119  //g_main_loop_quit (loop);
120 
121  break;
122  }
123 
124  default:
125 
126  break;
127 
128 }
129 
130  return true;
131 }
132 
133 
134 /*
136 typedef struct {
137  GstMiniObject mini_object;
138 
139  // pointer to data and its size
140  guint8 *data;
141  guint size;
142 
143  // timestamp
144  GstClockTime timestamp;
145  GstClockTime duration;
146 
147  // the media type of this buffer
148  GstCaps *caps;
149 
150  // media specific offset
151  guint64 offset;
152  guint64 offset_end;
153 
154  guint8 *malloc_data;
155 } GstBuffer;
156 */
157 
158 
 159 moGBoolean
 160 moGsGraph::cb_buffer_disconnected(
 161  moGPointer u_data
 162  ) {
163 
164  moGsGraph* pGsGraph;
165 
166  if (u_data!=0) {
167  pGsGraph = (moGsGraph*)u_data;
168  if (pGsGraph) {
169  pGsGraph->MODebug2->Error(moText("moGsGraph::cb_buffer_disconnected !!!"));
170  }
171  }
172 
173  //moAbstract::MODebug2->Error(moText("moGsGraph::cb_buffer_disconnected !!!"));
174  return false;
175 }
176 
204 #ifdef GSTVERSION
 205 moGstFlowReturn
 206 moGsGraph::appsink_new_sample( moGstAppSink* appsink, moGPointer user_data ) {
207 
 208  moGsGraph* pGsGraph = (moGsGraph*) user_data;
 209  if (!pGsGraph) return GST_FLOW_ERROR;
 210 
 211  int w = pGsGraph->GetVideoFormat().m_Width;
 212  int h = pGsGraph->GetVideoFormat().m_Height;
213  //pGsGraph->MODebug2->Message("new sample");
214 
215 
216  GstAppSink* psink = (GstAppSink*) appsink;
217  if (!psink) return GST_FLOW_ERROR;
218 
219  GstSample* sample = gst_app_sink_pull_sample( psink );
220  if (!sample) return GST_FLOW_OK;
221 
222  GstCaps* bcaps = gst_sample_get_caps( sample );
223  if (!bcaps) return GST_FLOW_OK;
224 
225  GstBuffer* Gbuffer = gst_sample_get_buffer (sample);
226  int bsize = gst_buffer_get_size( Gbuffer );
227  if (!( bsize>0 && (int)bsize<=(h*w*4) )) return GST_FLOW_ERROR;
228  //pGsGraph->MODebug2->Message(moText("Bucket receiving size: ") + IntToStr(bsize) );
229 
230  //gst buffer to moldeo bucketpool
231  moBucket *pbucket=NULL;
232 
233  if (!pGsGraph->m_pBucketsPool) return GST_FLOW_ERROR;
234  if(pGsGraph->m_pBucketsPool->IsFull()) {
235  return GST_FLOW_OK;
236  }
237 
238  pbucket = new moBucket();
239  if (pbucket==NULL) return GST_FLOW_ERROR;
240 
241  GstMapInfo mapinfo;
242  gst_buffer_map ( Gbuffer, &mapinfo, GST_MAP_READ);
243 
244  MOubyte color = mapinfo.data[0];
245  //pGsGraph->MODebug2->Message(moText("color: ") + IntToStr(color) );
246 
247  pbucket->SetBuffer( bsize,(MOubyte*)mapinfo.data );
248 
249  bool added_bucket = pGsGraph->m_pBucketsPool->AddBucket( pbucket );
250  if(!added_bucket)
251  pGsGraph->MODebug2->Error(moText("Bucket error"));
252 
253  gst_buffer_unmap ( Gbuffer, &mapinfo );
254  gst_sample_unref(sample);
255  return GST_FLOW_OK;
256 }
257 
 258 moGstFlowReturn
 259 moGsGraph::appsink_new_preroll( moGstAppSink* appsink, moGPointer user_data ) {
260 
261  return 0;
262 }
263 
264 void
265 moGsGraph::appsink_eos( moGstAppSink* appsink, moGPointer user_data ) {
266 
267 }
268 #endif
269 
270 
271 
272 
 273 #ifndef GSTVERSION
 274 moGBoolean
 275 moGsGraph::cb_have_data ( moGstPad *pad, moGstBuffer *buffer, moGPointer u_data )
 276 #else
 277 moGstPadProbeReturn
 278 moGsGraph::cb_have_data ( moGstPad *pad, moGstPadProbeInfo *info, moGPointer u_data )
 279 #endif
280 {
281  moGsGraph* pGsGraph;
282  pad = NULL;
283  GstStructure* str = NULL;
284  GstBuffer* Gbuffer;
285  GstCaps* caps = NULL;
286  GstPad* Gpad = NULL;
287 
288 #ifndef GSTVERSION
289  Gbuffer = (GstBuffer*)buffer;
290  caps = Gbuffer->caps;
291 #else
292  GstPadProbeInfo* Ginfo = (GstPadProbeInfo*) info;
293  Gbuffer = GST_PAD_PROBE_INFO_BUFFER ( Ginfo );
294  Gpad = (GstPad*)pad;
295  if (Gpad)
296  caps = gst_pad_get_current_caps( Gpad );
297 #endif
298 
299  if (caps)
300  str = gst_caps_get_structure ( (caps), 0);
301  else
302  return false;
303 
304  if (str==NULL)
305  return FALSE;
306 
307 
308  const gchar *sstr;
309  const gchar *strname;
310 
311  strname = gst_structure_get_name( str );
312  sstr = gst_structure_to_string (str);
313 
314  //cout << "new data: timestamp: " << buffer->timestamp << " duration:" << buffer->duration << " size:" << buffer->size << " caps:" << sstr << endl;
315  //moAbstract::MODebug2->Message( moText(" moGsGraph:: cb_have_data") );
316 
317  gchar* isaudio = NULL;
318  gchar* isvideo = NULL;
319 
320  isaudio = g_strrstr (strname, "audio");
321  isvideo = g_strrstr (strname, "video");
322 
323  if (u_data!=0) {
324  pGsGraph = (moGsGraph*)u_data;
325 
326  if (isvideo) {
327  if (pGsGraph->m_VideoFormat.m_WaitForFormat)
328  pGsGraph->SetVideoFormat( caps, Gbuffer );
329  }
330 
331  if (isaudio) {
332  if (pGsGraph->m_AudioFormat.m_WaitForFormat)
333  pGsGraph->SetAudioFormat( caps, Gbuffer );
334  }
335  } else {
336  //moAbstract::MODebug2->Error( moText(" moGsGraph:: cb_have_data error: no user data!!") );
 337  return true; //keep trying
338  }
339 
340  //pGsGraph->MODebug2->Message(moText("moGsGraph::cb_have_data receiving..."));
341 
342  //return true;
343 
344  int w = pGsGraph->GetVideoFormat().m_Width;
345  int h = pGsGraph->GetVideoFormat().m_Height;
346 
347  //cout << "w:" << w << "h:" << h << endl;
348 
349  if (Gbuffer ) {
350  int bsize;
351 #ifndef GSTVERSION
352  bsize = Gbuffer->size;
353 #else
354  bsize = gst_buffer_get_size( Gbuffer );
355 #endif
356  if (isvideo) {
357  if ( bsize>0 && (int)bsize<=(h*w*4) ) {
358  //g_passing buffer to bucketpool
359  moBucket *pbucket=NULL;
360 
361  if (pGsGraph->m_pBucketsPool)
362  if(!pGsGraph->m_pBucketsPool->IsFull()) {
363 
364  //pGsGraph->MODebug2->Message(moText("Bucket receiving size: ") + IntToStr(Gbuffer->size) );
365 
366  pbucket = new moBucket();
367  if(pbucket!=NULL) {
368 
369  //pGsGraph->m_VideoFormat.m_BufferSize = Gbuffer->size;
370  //pGsGraph->m_VideoFormat.m_TimePerFrame = Gbuffer->duration;
371 
372  gint value_numerator, value_denominator;
373  gst_structure_get_fraction( str, "framerate", &value_numerator, &value_denominator );
374 
375  //MOuint frate = (value_numerator * 100) / value_denominator;
376  //MODebug2->Push( " frate: "+ IntToStr(frate) + " timeperframe: " + IntToStr(Gbuffer->duration));
377 #ifndef GSTVERSION
378  pbucket->SetBuffer( bsize,(MOubyte*)Gbuffer->data );
379 #else
380  pbucket->SetBuffer( bsize,(MOubyte*)GST_PAD_PROBE_INFO_DATA(Ginfo) );
381 #endif
382  //pbucket->BuildBucket( w*h*4, 100 );
383  //MODebug2->Push("bucket created.");
384  //gst_buffer_extract( Gbuffer, 0, pbucket->GetBuffer(), Gbuffer->size );
385 
386 
387  if(!pGsGraph->m_pBucketsPool->AddBucket( pbucket )) {
388  pGsGraph->MODebug2->Error(moText("Bucket error"));
389  }// else MODebug2->Push("bucket Added.");
390  // else cout << "bucket passed!!" << buffer->size << "timestamp:" << buffer->timestamp << endl;
391  }
392 
393  }
394 
395  } else {
396  pGsGraph->MODebug2->Error( moText(" moGsGraph:: cb_have_data error: wrong buffer size:")
397  + IntToStr(bsize));
398 
399  }
400  }
401  } else {
402  pGsGraph->MODebug2->Error( moText(" moGsGraph:: cb_have_data error: no Gbuffer data!!") );
403  }
404 
405  return TRUE;
406 }
407 
408 
409 #ifndef GSTVERSION
410 void
 411 moGsGraph::on_rtsppadd_added ( moGstElement *rtspsrc, moGstPad *pad, moGPointer u_data ) {
 412 
413  rtspsrc = NULL;
414  GstCaps *caps = NULL;
415  GstPadLinkReturn padlink;
416  gchar* padname = NULL;
417  const gchar* strname = NULL;
418  const gchar* medianame = NULL;
419  GstStructure *str = NULL;
420  GstPad* Gpad = (GstPad*) pad;
421  moGsGraph* pGsGraph;
422 
423  if (gst_pad_is_linked(Gpad)) {
424  return;
425  }
426  if (u_data!=0) {
427  pGsGraph = (moGsGraph*)u_data;
428 
429  /* check media type */
430  caps = gst_pad_get_caps (Gpad);
431  padname = gst_pad_get_name(Gpad);
432  if (padname) {
433  str = gst_caps_get_structure (caps, 0);
434 
435  const gchar *sstr;
436 
437  sstr = gst_structure_to_string (str);
438  strname = gst_structure_get_name (str);
439  medianame = gst_structure_get_string (str, "media");
440  //strname = GST_STRUCTURE(str)->has_field("media");
441 
442  moText dbgstr = medianame;
443  pGsGraph->MODebug2->Push( dbgstr );
444 
445  if (g_strrstr (medianame, "video")) {
447  if ( pGsGraph->m_pRTSPDepaySink ) {
448  padlink = gst_pad_link ( Gpad, (GstPad*)pGsGraph->m_pRTSPDepaySink);
449  if (padlink==GST_PAD_LINK_OK) {
451  }
452  } else
453  if ( pGsGraph->m_pHTTPSource ) {
454  padlink = gst_pad_link ( Gpad, (GstPad*)pGsGraph->m_pDecoderBin );
455  if (padlink==GST_PAD_LINK_OK) {
457  }
458  }
459  }
460 
461  }
462  }
463 
464 }
465 
466 #else
467 
468 #endif
469 
470 
471 #ifndef GSTVERSION
 472 void
 473 moGsGraph::cb_pad_added_new ( moGstElement *decodebin, moGstPad *pad, moGBoolean last, moGPointer u_data)
 474 #else
475 void
476 moGsGraph::cb_pad_added_new ( moGstElement *decodebin, moGstPad *pad, moGPointer u_data)
477 #endif
478 {
479  decodebin = NULL;
480 #ifndef GSTVERSION
481  last = false;
482 #endif
483  GstCaps *caps = NULL;
484  GstPad *videopad = NULL;
485 // GstPad *audiopad = NULL;
486  GstPad *audiopadinconverter = NULL;
487  GstPadLinkReturn padlink;
488  gchar* padname = NULL;
489  const gchar* strname = NULL;
490  GstStructure *str = NULL;
491  GstPad* Gpad = (GstPad*) pad;
492 
493  moGsGraph* pGsGraph = NULL;
494  GstElement* SinkElement = NULL;
495 
496  cout << "cb_pad_added_new" << endl;
497 
498 
499  if (gst_pad_is_linked(Gpad)) {
500  cout << "cb_pad_added_new already linked!" << endl;
501  return;
502  }
503 
504 
505  if (u_data!=0) {
506  pGsGraph = (moGsGraph*)u_data;
507  /* check media type */
508 #ifndef GSTVERSION
509  caps = gst_pad_get_caps (Gpad);
510 #else
511  caps = gst_pad_get_current_caps(Gpad);
512 #endif
513  padname = gst_pad_get_name(Gpad);
514  if (padname) {
515  str = gst_caps_get_structure (caps, 0);
516 
517  const gchar *sstr=NULL;
518  if (str) {
519  sstr = gst_structure_to_string (str);
520  cout << "cb_newpad: new pad: " << padname << "caps:" << sstr << endl;
521  } else {
522  MODebug2->Error(moText("moGsGraph::cb_newpad > gst_caps_get_structure is empty") );
523  }
524 
525  if (sstr==NULL) {
526  MODebug2->Error(moText("moGsGraph::cb_newpad > sstr gst_structure_to_string is empty") );
527  } else strname = gst_structure_get_name (str);
528  //cout << "cb_newpad: new pad: " << padname << "strname:" << strname << endl;
529  bool forcing_video = false;
530  bool is_video = false;
531  bool is_audio = false;
532  if (strname==NULL) {
533  //cout << "cb_newpad: strname==NULL" << endl;
534  MODebug2->Error(moText("moGsGraph::cb_newpad > gst_structure_to_string is empty, forcing video!") );
535  //return;
536  forcing_video = true;
537  } else {
538  is_video = g_strrstr (strname, "video");
539  is_audio = g_strrstr (strname, "audio");
540  }
541 
542 
543 
544 
545  if (is_audio) {
546  pGsGraph->m_pAudioPad = Gpad;
547 
548  //MODebug2->Push(moText("moGsGraph::cb_newpad: audio pad created"));
549 
550  if (pGsGraph->m_pAudioConverter) {
551 #ifndef GSTVERSION
552  audiopadinconverter = gst_element_get_pad ( (GstElement*) pGsGraph->m_pAudioConverter, "sink");
553 #else
554 audiopadinconverter = gst_element_get_static_pad ( (GstElement*) pGsGraph->m_pAudioConverter, "sink");
555 #endif
556  padlink = gst_pad_link (Gpad, audiopadinconverter);
557 
558  GstPad* srcAudio = gst_element_get_static_pad ( (GstElement*)pGsGraph->m_pAudioConverter, "src");
559 
560  if (padlink==GST_PAD_LINK_OK) {
561 #ifndef GSTVERSION
562  pGsGraph->cb_have_data_handler_id = gst_pad_add_buffer_probe_full ( srcAudio, G_CALLBACK (cb_have_data), pGsGraph, (GDestroyNotify) (cb_buffer_disconnected) );
563 #else
564  /*pGsGraph->cb_have_data_handler_id = gst_pad_add_probe ( srcAudio,
565  GST_PAD_PROBE_TYPE_BUFFER,
566  (GstPadProbeCallback) cb_have_data,
567  pGsGraph,
568  (GDestroyNotify) (cb_buffer_disconnected) );*/
569 #endif
570  }
571 
572  } else if (pGsGraph->m_pAudioSink) {
573  audiopadinconverter = gst_element_get_static_pad ( (GstElement*) pGsGraph->m_pAudioSink, "sink");
574  padlink = gst_pad_link (Gpad, audiopadinconverter);
575  }
576 
577 
578  } else if (is_video || forcing_video ) {
579  pGsGraph->m_pVideoPad = Gpad;
580 
581  MODebug2->Message(moText("moGsGraph::cb_newpad: video pad created"));
582  if (pGsGraph->m_pVideoScale==NULL) {
 583  //direct version: straight to videoscale
584  if (!(GstElement*)pGsGraph->m_pColorSpaceInterlace) {
585  SinkElement = (GstElement*)pGsGraph->m_pColorSpace;
586  } else {
587  SinkElement = (GstElement*)pGsGraph->m_pColorSpaceInterlace;
588  }
589 #ifndef GSTVERSION
590  videopad = gst_element_get_pad ( SinkElement, "sink");
591  if (videopad) {
592  padlink = gst_pad_link( Gpad, videopad );
593  }
594 #else
595  videopad = gst_element_get_static_pad( SinkElement, "sink");
596  if (videopad) {
597  padlink = gst_pad_link( Gpad, videopad );
598  }
599 #endif
 600  //version with deinterlace
601  //videopad = gst_element_get_pad ( (GstElement*)pGsGraph->m_pVideoDeinterlace, "sink");
602 
603  //bool res = gst_pad_set_caps( gst_element_get_pad ( pGsGraph->m_pColorSpace, "src"), gst_caps_new_simple ("video/x-raw-rgb","bpp", G_TYPE_INT, 24, NULL) );
604 
605  if (padlink==GST_PAD_LINK_OK) {
606 // caps = gst_pad_get_caps( Gpad );
607  //pGsGraph->SetVideoFormat(caps);
608 #ifndef GSTVERSION
609  GstPad* srcRGB = gst_element_get_pad ( (GstElement*)pGsGraph->m_pColorSpace, "src");
610  pGsGraph->cb_have_data_handler_id = gst_pad_add_buffer_probe_full ( srcRGB, G_CALLBACK (cb_have_data), pGsGraph, (GDestroyNotify) (cb_buffer_disconnected) );
611 #else
612  GstPad* srcRGB = gst_element_get_static_pad ( (GstElement*)pGsGraph->m_pFakeSink, "sink");
613  /*
614  pGsGraph->cb_have_data_handler_id = gst_pad_add_probe ( srcRGB,
615  GST_PAD_PROBE_TYPE_BUFFER,
616  (GstPadProbeCallback) cb_have_data,
617  pGsGraph,
618  (GDestroyNotify) (cb_buffer_disconnected) );
619  */
620 #endif
621  //cout << "cb_newpad: linked pads..." << endl;
622  } else MODebug2->Error(moText("moGsGraph::cb_newpad > padlink BAD!") );
623 
624  } else {
 625  //version 2, with videoscale
 626 
 627  //direct version: straight to videoscale
628 #ifndef GSTVERSION
629 videopad = gst_element_get_pad ( (GstElement*)pGsGraph->m_pVideoScale, "sink");
630 #else
631 videopad = gst_element_get_static_pad ( (GstElement*)pGsGraph->m_pVideoScale, "sink");
632 #endif
 633  //version with deinterlace
634  //videopad = gst_element_get_pad ( (GstElement*)pGsGraph->m_pVideoDeinterlace, "sink");
635  //bool res = gst_pad_set_caps( gst_element_get_pad ( pGsGraph->m_pColorSpace, "src"), gst_caps_new_simple ("video/x-raw-rgb","bpp", G_TYPE_INT, 24, NULL) );
636 
637  padlink = gst_pad_link( Gpad, videopad );
638 
639  if (padlink==GST_PAD_LINK_OK) {
640  //caps = gst_pad_get_caps( Gpad );
641  //pGsGraph->SetVideoFormat(caps);
642 #ifndef GSTVERSION
643  GstPad* srcRGB = gst_element_get_pad ( (GstElement*)pGsGraph->m_pColorSpace, "src");
644  pGsGraph->cb_have_data_handler_id = gst_pad_add_buffer_probe_full ( srcRGB, G_CALLBACK (cb_have_data), pGsGraph, (GDestroyNotify) (cb_buffer_disconnected) );
645 #else
646  GstPad* srcRGB = gst_element_get_static_pad ( (GstElement*)pGsGraph->m_pColorSpace, "src");
647  pGsGraph->cb_have_data_handler_id = gst_pad_add_probe ( srcRGB,
648  GST_PAD_PROBE_TYPE_BUFFER,
649  (GstPadProbeCallback) cb_have_data,
650  pGsGraph,
651  (GDestroyNotify) (cb_buffer_disconnected) );
652 #endif
653  //cout << "cb_newpad: linked pads..." << endl;
654  }
655  }
656  }
657  }
658 
659  }
660 
661 }
662 
663 
664 //#ifndef GSTVERSION
 665 void
 666 moGsGraph::cb_pad_added ( moGstElement *decodebin, moGstPad *pad, moGPointer u_data)
 667 {
668  decodebin = NULL;
669  GstCaps *caps = NULL;
670  GstPad *videopad = NULL;
671 // GstPad *audiopad = NULL;
672  GstPad *audiopadinconverter = NULL;
673  GstPadLinkReturn padlink;
674  gchar* padname = NULL;
675  const gchar* strname = NULL;
676  GstStructure *str = NULL;
677  GstPad* Gpad = (GstPad*) pad;
678 
679  moGsGraph* pGsGraph;
680  GstElement* SinkElement = NULL;
681 
682  cout << "pad added" << endl;
683  if (gst_pad_is_linked(Gpad)) {
684  return;
685  }
686 
687 
688  if (u_data!=0) {
689  pGsGraph = (moGsGraph*)u_data;
690  /* check media type */
691 #ifndef GSTVERSION
692  caps = gst_pad_get_caps (Gpad);
693 #else
694  caps = gst_pad_get_current_caps(Gpad);
695 #endif
696  padname = gst_pad_get_name(Gpad);
697  if (padname) {
698  str = gst_caps_get_structure (caps, 0);
699 
700  const gchar *sstr;
701 
702  sstr = gst_structure_to_string (str);
703  cout << "cb_newpad: new pad: " << padname << "caps:" << sstr << endl;
704 
705  strname = gst_structure_get_name (str);
706 
707  if (g_strrstr (strname, "audio")) {
708  pGsGraph->m_pAudioPad = Gpad;
709 
710  //MODebug2->Push(moText("moGsGraph::cb_pad_added: audio pad created"));
711 
712  //pGsGraph->BuildAudioFilters();
713 
714  if (pGsGraph->m_pAudioConverter && 1==1) {
715 
716  gboolean link_audioresult = gst_element_link_many( (GstElement*)pGsGraph->m_pAudioConverter,
717  (GstElement*)pGsGraph->m_pAudioVolume,
718  (GstElement*)pGsGraph->m_pAudioPanorama,
719  (GstElement*)pGsGraph->m_pAudioSink, NULL );
720  if (link_audioresult) {
721 #ifndef GSTVERSION
722  audiopadinconverter = gst_element_get_pad ( (GstElement*) pGsGraph->m_pAudioConverter, "sink");
723 #else
724  audiopadinconverter = gst_element_get_static_pad( (GstElement*) pGsGraph->m_pAudioConverter, "sink");
725 #endif
726  padlink = gst_pad_link (Gpad, audiopadinconverter);
727 
728 #ifndef GSTVERSION
729  GstPad* srcAudio = gst_element_get_pad ( (GstElement*)pGsGraph->m_pAudioConverter, "src");
730 #else
731  GstPad* srcAudio = gst_element_get_static_pad( (GstElement*)pGsGraph->m_pAudioConverter, "src");
732 #endif
733  if (padlink==GST_PAD_LINK_OK) {
734 #ifndef GSTVERSION
735  pGsGraph->cb_have_data_handler_id = gst_pad_add_buffer_probe_full ( srcAudio, G_CALLBACK (cb_have_data), pGsGraph, (GDestroyNotify) (cb_buffer_disconnected) );
736 #else
737  pGsGraph->cb_have_data_handler_id = gst_pad_add_probe( srcAudio,
738  GST_PAD_PROBE_TYPE_BUFFER,
739  (GstPadProbeCallback) cb_have_data,
740  pGsGraph,
741  (GDestroyNotify) (cb_buffer_disconnected) );
742 #endif
743 
744  }
745  }
746  } else if (pGsGraph->m_pAudioSink && 1==1) {
747 #ifndef GSTVERSION
748  audiopadinconverter = gst_element_get_pad ( (GstElement*) pGsGraph->m_pAudioSink, "sink");
749 #else
750  audiopadinconverter = gst_element_get_static_pad ( (GstElement*) pGsGraph->m_pAudioSink, "sink");
751 #endif
752  padlink = gst_pad_link (Gpad, audiopadinconverter);
753  }
754 
755 
756  } else if (g_strrstr (strname, "video")) {
757  pGsGraph->m_pVideoPad = Gpad;
758 
759  //MODebug2->Push(moText("moGsGraph::cb_pad_added: video pad created"));
760  if (pGsGraph->m_pVideoScale==NULL) {
 761  //direct version: straight to videoscale
762  if (!(GstElement*)pGsGraph->m_pColorSpaceInterlace) {
763  SinkElement = (GstElement*)pGsGraph->m_pColorSpace;
764  } else {
765  SinkElement = (GstElement*)pGsGraph->m_pColorSpaceInterlace;
766  }
767 #ifndef GSTVERSION
768  videopad = gst_element_get_pad ( SinkElement, "sink");
769  if (videopad) {
770  padlink = gst_pad_link( Gpad, videopad );
771  }
772 #else
773  videopad = gst_element_get_static_pad( SinkElement, "sink");
774  if (videopad) {
775  padlink = gst_pad_link( Gpad, videopad );
776  }
777 #endif
 778  //version with deinterlace
779  //videopad = gst_element_get_pad ( (GstElement*)pGsGraph->m_pVideoDeinterlace, "sink");
780 
781  //bool res = gst_pad_set_caps( gst_element_get_pad ( pGsGraph->m_pColorSpace, "src"), gst_caps_new_simple ("video/x-raw-rgb","bpp", G_TYPE_INT, 24, NULL) );
782 
783  if (padlink==GST_PAD_LINK_OK) {
784 #ifndef GSTVERSION
785  //pGsGraph->SetVideoFormat(caps);
786  GstPad* srcRGB = gst_element_get_pad ( (GstElement*)pGsGraph->m_pColorSpace, "src");
787  pGsGraph->cb_have_data_handler_id = gst_pad_add_buffer_probe_full ( srcRGB, G_CALLBACK (cb_have_data), pGsGraph, (GDestroyNotify) (cb_buffer_disconnected) );
788  //cout << "cb_newpad: linked pads..." << endl;
789 #else
790  GstPad* srcRGB = gst_element_get_static_pad ( (GstElement*)pGsGraph->m_pColorSpace, "src");
791  pGsGraph->cb_have_data_handler_id = gst_pad_add_probe ( srcRGB,
792  GST_PAD_PROBE_TYPE_BUFFER,
793  (GstPadProbeCallback) cb_have_data,
794  pGsGraph,
795  (GDestroyNotify) (cb_buffer_disconnected) );
796 #endif
797  }
798  } else {
 799  //version 2, with videoscale
 800 
 801 //direct version: straight to videoscale
802 #ifndef GSTVERSION
803 videopad = gst_element_get_pad ( (GstElement*)pGsGraph->m_pVideoScale, "sink");
804 #else
805 videopad = gst_element_get_static_pad ( (GstElement*)pGsGraph->m_pVideoScale, "sink");
806 #endif
 807  //version with deinterlace
808  //videopad = gst_element_get_pad ( (GstElement*)pGsGraph->m_pVideoDeinterlace, "sink");
809  //bool res = gst_pad_set_caps( gst_element_get_pad ( pGsGraph->m_pColorSpace, "src"), gst_caps_new_simple ("video/x-raw-rgb","bpp", G_TYPE_INT, 24, NULL) );
810 
811  padlink = gst_pad_link( Gpad, videopad );
812 
813  if (padlink==GST_PAD_LINK_OK) {
814  //caps = gst_pad_get_caps( Gpad );
815  //pGsGraph->SetVideoFormat(caps);
816 #ifndef GSTVERSION
817  GstPad* srcRGB = gst_element_get_pad ( (GstElement*)pGsGraph->m_pColorSpace, "src");
818  pGsGraph->cb_have_data_handler_id = gst_pad_add_buffer_probe_full ( srcRGB, G_CALLBACK (cb_have_data), pGsGraph, (GDestroyNotify) (cb_buffer_disconnected) );
819 #else
820  GstPad* srcRGB = gst_element_get_static_pad ( (GstElement*)pGsGraph->m_pColorSpace, "src");
821  pGsGraph->cb_have_data_handler_id = gst_pad_add_probe ( srcRGB,
822  GST_PAD_PROBE_TYPE_BUFFER,
823  (GstPadProbeCallback) cb_have_data,
824  pGsGraph,
825  (GDestroyNotify) (cb_buffer_disconnected) );
826 #endif
827  //cout << "cb_newpad: linked pads..." << endl;
828  }
829  }
830  }
831  }
832 
833  }
834 
835 }
836 //#else
837 //#endif
838 
839 #ifndef GSTVERSION
 840 void
 841 moGsGraph::cb_handoff (moGstElement *fakesrc,
 842  moGstBuffer *buffer,
843  moGstPad *pad,
844  moGPointer user_data)
845 {
846  static gboolean white = FALSE;
847 
848  GstElement* Gfakesrc = (GstElement*)fakesrc;
849  GstBuffer* Gbuffer = (GstBuffer*)buffer;
850  GstPad* Gpad = (GstPad*)pad;
851  Gpad = NULL;
852  Gfakesrc = NULL;
853  moGsGraph* pGsGraph;
854 
855 
856  if (user_data!=0) {
857  pGsGraph = (moGsGraph*)user_data;
859  //memset (GST_BUFFER_DATA (buffer), white ? 0x44 : 0x0, GST_BUFFER_SIZE (buffer));
860  pGsGraph->CopyVideoFrame( GST_BUFFER_DATA (Gbuffer), GST_BUFFER_SIZE (Gbuffer) );
861  //memcpy( GST_BUFFER_DATA (buffer), (void*)pGsGraph->GetVideoFrame(), GST_BUFFER_SIZE (buffer) );
862  } else {
864  memset (GST_BUFFER_DATA (Gbuffer), white ? 0xff : 0x0, GST_BUFFER_SIZE (Gbuffer));
865  }
866 
867 
868 
869 
870  GstCaps *caps;
871 
872  caps = gst_caps_new_simple ("video/x-raw-rgb", "width", G_TYPE_INT, 400,
873  "height", G_TYPE_INT, 300,
874  "bpp", G_TYPE_INT, 24,
875  "depth", G_TYPE_INT, 24,
876  "framerate", GST_TYPE_FRACTION, 10, 1,
877  NULL);
878  gst_buffer_set_caps (Gbuffer, caps);
879  gst_caps_unref (caps);
880  /* this makes the image black/white */
881 
882 
883  white = !white;
884 
885 }
886 #else
887 #endif
888 
889 /* returns TRUE if there was an error or we caught a keyboard interrupt. */
890 static gboolean
891 event_loop (GstElement * pipeline, gboolean blocking, GstState target_state)
892 {
893  GstBus *bus;
894  GstMessage *message = NULL;
895  gboolean res = FALSE;
896  gboolean buffering = FALSE;
897 
898  bus = gst_element_get_bus (GST_ELEMENT (pipeline));
899 
900  if (!bus) exit(1);
901 
902  while (TRUE) {
903  message = gst_bus_poll (bus, GST_MESSAGE_ANY, blocking ? -1 : 0);
904 
905  /* if the poll timed out, only when !blocking */
906  if (message == NULL)
907  goto exit;
908 
909  /* check if we need to dump messages to the console */
910  if (true) {
911  const GstStructure *s;
912 
913  s = gst_message_get_structure (message);
914 
915  g_print (("Got Message from element \"%s\" (%s): "),
916  GST_STR_NULL (GST_ELEMENT_NAME (GST_MESSAGE_SRC (message))),
917  gst_message_type_get_name (GST_MESSAGE_TYPE (message)));
918  if (s) {
919  gchar *sstr;
920 
921  sstr = gst_structure_to_string (s);
922  g_print ("%s\n", sstr);
923  g_free (sstr);
924  } else {
925  g_print ("no message details\n");
926  }
927  }
928 
929  switch (GST_MESSAGE_TYPE (message)) {
930 
931  case GST_MESSAGE_WARNING:{
932  GError *gerror;
933  gchar *debug;
934  gchar *name = gst_object_get_path_string (GST_MESSAGE_SRC (message));
935 
936  /* dump graph on warning */
937  GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (pipeline),
938  GST_DEBUG_GRAPH_SHOW_ALL, "gst-launch.warning");
939 
940  gst_message_parse_warning (message, &gerror, &debug);
941  g_print (("WARNING: from element %s: %s\n"), name, gerror->message);
942  if (debug) {
943  g_print (("Additional debug info:\n%s\n"), debug);
944  }
945  g_error_free (gerror);
946  g_free (debug);
947  g_free (name);
948  break;
949  }
950  case GST_MESSAGE_ERROR:{
951  GError *gerror;
952  gchar *debug;
953 
954  /* dump graph on error */
955  GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (pipeline),
956  GST_DEBUG_GRAPH_SHOW_ALL, "gst-launch.error");
957 
958  gst_message_parse_error (message, &gerror, &debug);
959  gst_object_default_error (GST_MESSAGE_SRC (message), gerror, debug);
960  g_error_free (gerror);
961  g_free (debug);
962  /* we have an error */
963  res = TRUE;
964  goto exit;
965  }
966  case GST_MESSAGE_STATE_CHANGED:{
967  GstState old, mnew, pending;
968 
969  gst_message_parse_state_changed (message, &old, &mnew, &pending);
970 
971  /* debug each state change
972  GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "gst-launch");
973  */
974 
975  /* we only care about pipeline state change messages */
976  if (GST_MESSAGE_SRC (message) != GST_OBJECT_CAST (pipeline))
977  break;
978 
979  /* debug only overall state changes
980  {
981  gchar *dump_name;
982 
983  dump_name = g_strdup_printf ("gst-launch.%s",gst_element_state_get_name (new);
984  GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (pipeline), GST_DEBUG_GRAPH_SHOW_ALL, dump_name);
985  g_free (dump_name);
986  }
987  */
988 
989  /* ignore when we are buffering since then we mess with the states
990  * ourselves. */
991  if (buffering) {
992  fprintf (stderr,
993  ("Prerolled, waiting for buffering to finish...\n"));
994  break;
995  }
996 
997  /* if we reached the final target state, exit */
998  if (target_state == GST_STATE_PAUSED && mnew == target_state)
999  goto exit;
1000 
1001  /* else not an interesting message */
1002  break;
1003  }
1004  case GST_MESSAGE_BUFFERING:{
1005  gint percent;
1006 
1007  gst_message_parse_buffering (message, &percent);
1008  fprintf (stderr, ("buffering... %d \r"), percent);
1009 
1010  /* no state management needed for live pipelines */
1011  /*
1012  if (is_live)
1013  break;
1014  */
1015 
1016  if (percent == 100) {
1017  /* a 100% message means buffering is done */
1018  buffering = FALSE;
1019  /* if the desired state is playing, go back */
1020  if (target_state == GST_STATE_PLAYING) {
1021  fprintf (stderr,
1022  ("Done buffering, setting pipeline to PLAYING ...\n"));
1023  gst_element_set_state (pipeline, GST_STATE_PLAYING);
1024  } else
1025  goto exit;
1026  } else {
1027  /* buffering busy */
1028  if (buffering == FALSE && target_state == GST_STATE_PLAYING) {
1029  /* we were not buffering but PLAYING, PAUSE the pipeline. */
1030  fprintf (stderr, ("Buffering, setting pipeline to PAUSED ...\n"));
1031  gst_element_set_state (pipeline, GST_STATE_PAUSED);
1032  }
1033  buffering = TRUE;
1034  }
1035  break;
1036  }
1037  case GST_MESSAGE_APPLICATION:{
1038  const GstStructure *s;
1039 
1040  s = gst_message_get_structure (message);
1041 
1042  if (gst_structure_has_name (s, "GstLaunchInterrupt")) {
1043  /* this application message is posted when we caught an interrupt and
1044  * we need to stop the pipeline. */
1045  fprintf (stderr, ("Interrupt: Stopping pipeline ...\n"));
1046  /* return TRUE when we caught an interrupt */
1047  res = TRUE;
1048  goto exit;
1049  }
1050  }
1051  default:
1052  /* just be quiet by default */
1053  break;
1054  }
1055  if (message)
1056  gst_message_unref (message);
1057  }
1058  g_assert_not_reached ();
1059 
1060 exit:
1061  {
1062  if (message)
1063  gst_message_unref (message);
1064  gst_object_unref (bus);
1065  return res;
1066  }
1067 }
1068 
1069 
1070 
1071 //===========================================
1072 //
 1073 // Class: moGsFramework
1074 //
1075 //===========================================
1076 
1077 //GMainLoop *moGsGraph::loop = g_main_loop_new (NULL, FALSE);
1078 
 1079 moGsFramework::moGsFramework() {
 1080  // m_pDevEnum = NULL;
1081  // m_pEnum = NULL;
1082 }
1083 
1084 
 1085 moGsFramework::~moGsFramework() {
 1086 
1087 
1088 
1089 }
1090 
 1159 moCaptureDevices* moGsFramework::LoadCaptureDevices() {
 1160 
1161  GstElement* device;
1162  #ifndef GSTVERSION
1163  GstPropertyProbe* probe;
1164  #endif
1165  GValueArray* va;
1166  GList *plist;
1167  GParamSpec* pm;
1168  GValue* vdefault;
1169  GValue valDef = { 0, };
1170  //GList* list=NULL;
1171  //guint i=0;
1172  gchar* device_name;
1173 
1174  MODebug2->Message( "moGsFramework::LoadCaptureDevices running..." );
1175 
1176  m_CaptureDevices.Empty();
1177 
1178 if (m_PreferredDevices.Count()==0) {
1179  moText cap_dev_name = moText("default");
1180  moCaptureDevice newdev;
1181  newdev.Present(true);
1182 
1183  newdev.SetName(cap_dev_name);
1184  newdev.SetLabelName("LIVEIN"+IntToStr(m_CaptureDevices.Count()));
1185 
1186  m_PreferredDevices.Add( newdev );
1187  }
1188  #ifdef MO_WIN32
1189  //m_CaptureDevices.Add( moCaptureDevice( moText("Laptop Integrated Webcam"), moText("webcam"), moText("-") ) );
1190  //m_CaptureDevices.Add( moCaptureDevice( moText("Default"), moText("-"), moText("-") ) );
1191  #ifdef GSTVERSION
1192  moText dname( "ksvideosrc" );
1193  #else
1194  moText dname( "dshowvideosrc" );
1195  #endif
1196  device_name = dname;
1197 
1198  for( MOuint i=0; i<m_PreferredDevices.Count();i++) {
1199  moCaptureDevice CaptDev = m_PreferredDevices[i];
1200  CaptDev.SetLabelName("LIVEIN"+IntToStr(m_CaptureDevices.Count()));
1201  AddCaptureDevice( CaptDev );
1202  MODebug2->Message( "moGsFramework::LoadCaptureDevices > Added preferred device: " + CaptDev.GetLabelName() );
1203  }
1204  //m_CaptureDevices.Add( moCaptureDevice( moText("Laptop Integrated Webcam"), moText("webcam"), moText("-") ) );
1205  //m_CaptureDevices.Add( moCaptureDevice( moText("Microsoft DV Camera and VCR"), moText("DV IEEE 1394"), moText("-"), 0 ) );
1206  //m_CaptureDevices.Add( moCaptureDevice( moText("VideoCAM Messenger"), moText("webcam"), moText("-") ) );
1207  //DIRECT SHOW TEST//
1208  /*
1209  HRESULT hr;
1210 
1211  // Create the System Device Enumerator.
1212  if(m_pDevEnum==NULL) {
1213  HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL,
1214  CLSCTX_INPROC_SERVER, IID_ICreateDevEnum,
1215  reinterpret_cast<void**>(&m_pDevEnum));
1216 
1217  if(SUCCEEDED(hr) && m_pEnum==NULL)
1218  {
1219  // Create an enumerator for the video capture category.
1220  hr = m_pDevEnum->CreateClassEnumerator(
1221  CLSID_VideoInputDeviceCategory,
1222  &m_pEnum, 0);
1223  } else {
1224  ShowError(hr);
1225  return &m_CaptureDevices;
1226  }
1227  }
1228  */
1229  #else
1230  #ifdef MO_MACOSX
1231  device_name = "wrappercamerabinsrc";
1232  #else
1233  device_name = moText("v4l2src");
1234  m_CaptureDevices.Add( moCaptureDevice( moText("Default"), moText("default"), moText("/dev/video0") ) );
1235  #endif
1236  // in linux: for v4l2src device could be /dev/video0 - /dev/video1 etc...
1237  //m_CaptureDevices.Add( moCaptureDevice( moText("Default"), moText("default") );
1238  //m_CaptureDevices.Add( moCaptureDevice( moText("Laptop Integrated Webcam"), moText("webcam"), moText("/dev/video0") ) );
1239  //m_CaptureDevices.Add( moCaptureDevice( moText("DV"), moText("DV IEEE 1394"), moText("-"), 0 ) );
1240 
1241  for(int i=0; i<m_PreferredDevices.Count();i++) {
1242  moCaptureDevice CaptDev = m_PreferredDevices[i];
1243  CaptDev.SetLabelName("LIVEIN"+IntToStr(m_CaptureDevices.Count()));
1244  AddCaptureDevice( CaptDev );
1245  MODebug2->Message( "moGsFramework::LoadCaptureDevices > Added preferred device: " + CaptDev.GetLabelName() );
1246  }
1247 
1248 
1249 
1250  #endif
1251 
1252 
1253 #ifndef GSTVERSION
1254  try {
1255 
1256  device = gst_element_factory_make (device_name, "source");
1257  gst_element_get_state(device, NULL, NULL, 5 * GST_SECOND);
1258  moText probepname = "device-name";
1259  if (!device || !GST_IS_PROPERTY_PROBE(device))
1260  goto finish;
1261  probe = GST_PROPERTY_PROBE (device);
1262  if (probe) {
1263  plist = (GList *)gst_property_probe_get_properties( probe );
1264  if (plist) {
1265  plist = (GList *)g_list_first(plist);
1266  do {
1267  pm = (GParamSpec *)plist->data;
1268  if (pm) {
1269  if (pm->name) {
1270  probepname = moText((char*)pm->name);
1271  MODebug2->Message( "moGsFramework::LoadCaptureDevices > probe property:"+probepname);
1272  va = gst_property_probe_get_values(probe, pm);
1273  if (va) {
1274  MODebug2->Message( "moGsFramework::LoadCaptureDevices > probe property:"+probepname+" has values!");
1275  }
1276  }
1277  }
1278  } while( plist=g_list_next(plist) );
1279  }
1280  }
1281  va = gst_property_probe_get_values_name (probe, (char*)probepname);
1282  //va = gst_property_probe_get_values_name (probe, "device");
1283  if (!va) {
1284  //TRY TO SET DEFAULT VALUE FROM PARAM SPEC
1285  g_value_init( &valDef, G_PARAM_SPEC_VALUE_TYPE(pm) );
1286  //vdefault = g_param_spec_get_default_value ( pm );
1287  g_param_value_set_default( pm, &valDef );
1288  vdefault = &valDef;
1289  if (vdefault) {
1290  moText defaultText(g_value_get_string( vdefault ));
1291  MODebug2->Message("moGsFramework::LoadCaptureDevices > Default value for: \""+moText((char*)probepname)+"\" is "+defaultText);
1292  //G_VALUE_TYPE_NAME(vdefault);
1293  moText cap_dev_name = defaultText;
1294  moCaptureDevice newdev;
1295  newdev.Present(true);
1296 
1297  newdev.SetName(cap_dev_name);
1298  newdev.SetLabelName("LIVEIN"+IntToStr(m_CaptureDevices.Count()));
1299 
1300  m_CaptureDevices.Add( newdev );
1301 
1302  MODebug2->Message( "moGsFramework::LoadCaptureDevices > Added Default capture device: " + newdev.GetName() + " label:" + newdev.GetLabelName() );
1303  }
1304  }
1305  if (!va)
1306  goto finish;
1307  for(guint i=0; i < va->n_values; ++i) {
1308  GValue* v = g_value_array_get_nth(va, i);
1309  //GArray* v = g_array_index(va, i);
1310  GString* stv = g_string_new( g_value_get_string(v) );
1311  if (stv) {
1312  moText cap_dev_name = moText((char*)stv->str);
1313  moCaptureDevice newdev;
1314  newdev.Present(true);
1315 
1316  newdev.SetName(cap_dev_name);
1317  newdev.SetLabelName("LIVEIN"+IntToStr(m_CaptureDevices.Count()));
1318 
1319  m_CaptureDevices.Add( newdev );
1320 
1321  MODebug2->Message( "moGsFramework::LoadCaptureDevices > Added capture device: " + newdev.GetName() + " label:" + newdev.GetLabelName() );
1322  }
1323  //list = g_list_append(list, );
1324  }
1325  g_value_array_free(va);
1326 
1327  finish:
1328  {
1329  gst_element_set_state (device, GST_STATE_NULL);
1330  gst_object_unref(GST_OBJECT (device));
1331  }
1332  }
1333  catch(...) {
1334  MODebug2->Error("moGsFramework::LoadCaptureDevices > exception error.");
1335  }
1336 #else
1337 #if (GST_VERSION_MINOR > 8)
1338  GstDeviceMonitor *monitor = NULL;
1339  GList *devices = NULL;
1340 
1341  monitor = gst_device_monitor_new();
1342  if (!gst_device_monitor_start (monitor))
1343  g_error ("Failed to start device monitor!");
1344 
1345  devices = gst_device_monitor_get_devices (monitor);
1346  int idev = 0;
1347  if (devices != NULL) {
1348  while (devices != NULL) {
1349  GstDevice *device = (GstDevice*)devices->data;
1350 
1351  gchar *device_class, *caps_str, *name;
1352  GstCaps *caps;
1353  guint i, size = 0;
1354 
1355  caps = gst_device_get_caps (device);
1356  if (caps != NULL)
1357  size = gst_caps_get_size (caps);
1358 
1359  name = gst_device_get_display_name (device);
1360  device_class = gst_device_get_device_class (device);
1361  for (i = 0; i < size; ++i) {
1362  GstStructure *s = gst_caps_get_structure (caps, i);
1363  caps_str = gst_structure_to_string (s);
1364  //g_print ("\t%s %s\n", (i == 0) ? "caps :" : " ", caps_str);
1365  MODebug2->Message( moText("LoadCaptureDevice > name: ") + moText(name) + moText("caps: ") + moText(caps_str) );
1366  g_free (caps_str);
1367  }
1368 
1369 
1370 
1371  if (idev==0) {
1372 
1373  moText cap_dev_name = name;
1374  moCaptureDevice newdev;
1375  newdev.Present(true);
1376 
1377  newdev.SetName(cap_dev_name);
1378  newdev.SetLabelName("LIVEIN"+IntToStr(m_CaptureDevices.Count()));
1379 
1380  m_CaptureDevices.Add( newdev );
1381 
1382  }
1383 
1384 
1385 
1386  //device_added (device);
1387  gst_object_unref (device);
1388  devices = g_list_remove_link (devices, devices);
1389  idev++;
1390  }
1391  } else {
1392  g_print ("No devices found!\n");
1393  }
1394 #endif // GST_VERSION_MINOR
1395 #endif
1396 
1398 
1399  return &m_CaptureDevices;
1400 
1401 }
1402 
1403 
1404 
 1405 moCaptureDevices* moGsFramework::UpdateCaptureDevices() {
 1406 
1407 
1408  return &m_CaptureDevices;
1409 
1410 }
1411 
1412 
 1413 bool
 1414 moGsFramework::CheckCaptureDevice( int i ) {
 1415 
 1416  i = 0;
1417 
1418  return false;
1419 
1420 }
1421 
 1422 bool
 1423 moGsFramework::AddCaptureDevice( moCaptureDevice &p_capdev ) {
 1424 
 1425 
1426  for(int i=0; i<(int)m_CaptureDevices.Count(); i++) {
1427  if ( m_CaptureDevices[i].GetName() == p_capdev.GetName() ) {
1428  return false;
1429  }
1430  }
1431 
1432  m_CaptureDevices.Add( p_capdev );
1433 
1434  MODebug2->Message( moText("Added capture device:") + p_capdev.GetName() );
1435 
1436  return true;
1437 }
1438 
1439 //===========================================
1440 //
1441 // Class: moGsGraph
1442 //
1443 //===========================================
1444 
 1445 moGsGraph::moGsGraph() {
 1446 
1447  m_pGstBus = NULL;
1448  m_pGMainLoop = NULL;
1449  m_pGMainContext = NULL;
1450  m_pGstPipeline = NULL;
1451  m_pGsFramework = NULL;
1452 
1453  m_pFileSource = NULL;
1454  m_pFinalSource = NULL;
1455  m_pFileSink = NULL;
1456  m_pRTSPSource = NULL;
1457  m_pRTSPDepay = NULL;
1458  m_pHTTPSource = NULL;
1459  m_pMultipartDemux = NULL;
1460  m_pJpegDecode = NULL;
1461  m_pDecoderBin = NULL;
1462  m_pEncoder = NULL;
1463 
1464  m_pTypeFind = NULL;
1465  m_pCapsFilter = NULL;
1466  m_pFakeSink = NULL;
1467  m_pFakeSource = NULL;
1468  m_pIdentity = NULL;
1469 
1470  m_pBucketsPool = NULL;
1471  m_pVideoScale = NULL;
1472  m_pVideoBalance = NULL;
1473 
1474  m_pVideoDeinterlace = NULL;
1475  m_pColorSpaceInterlace = NULL;
1476  m_pColorSpace = NULL;
1477 
1478  m_pAudioConverter = NULL;
1479  m_pAudioConverter2 = NULL;
1480  m_pAudioConverter3 = NULL;
1481  m_pAudioConverter4 = NULL;
1482  m_pAudioEcho = NULL;
1483  m_pAudioPanorama = NULL;
1484  m_pAudioAmplify = NULL;
1485  m_pAudioSpeed = NULL;
1486  m_pAudioVolume = NULL;
1487  m_pAudioSink = NULL;
1488 
1489  m_pAudioPad = NULL;
1490  m_pVideoPad = NULL;
1491 
1492  signal_newpad_id = 0;
1493  signal_handoff_id = 0;
1495  m_bEOS = false;
1496 
1497 }
1498 
 1499 moGsGraph::~moGsGraph() {
 1500  //last try to release objects
1501  FinishGraph();
1502 }
1503 
1504 
1505 /*
1506  GstElementFactory *factory;
1507  GstElement * element;
1508 
1509  // init GStreamer
1510  gst_init (&argc, &argv);
1511 
1512  // create element, method #2
1513  factory = gst_element_factory_find ("fakesrc");
1514  if (!factory) {
1515  g_print ("Failed to find factory of type 'fakesrc'\n");
1516  return -1;
1517  }
1518  element = gst_element_factory_create (factory, "source");
1519  if (!element) {
1520  g_print ("Failed to create element, even though its factory exists!\n");
1521  return -1;
1522  }
1523 
1524  gst_object_unref (GST_OBJECT (element));
1525 
1526  return 0;
1527 
1528 */
1529 
1530  //INIT METHODS
1531 bool
 1532 moGsGraph::InitGraph() {
 1533 
1534  signal_newpad_id = 0;
1535  signal_handoff_id = 0;
1537  m_BusWatchId = 0;
1538  m_bEOS = false;
1539 
 1540  //put this in the console main...
 1541  //initialization of the GStreamer library
1542  //guint major, minor, micro, nano;
1543  //GError *errores;
1544 
1545  MODebug2->Message( moText("Initializing GStreamer"));
1546  //bool init_result = gst_init_check (NULL, NULL, &errores);
1547 
1548  //gst_init(NULL, NULL);
1549  //init_result = init_result && gst_controller_init(NULL,NULL);
1550 
1551  //gst_version (&major, &minor, &micro, &nano);
1552  //MODebug2->Message( moText("GStreamer version") + IntToStr(major) + moText(".") + IntToStr(minor) + moText(".") + IntToStr(minor));
1553  //char vers[10];
1554  //sprintf( vers, "version: %i.%i.%i.%i",major,minor, micro, nano);
1555 
1556  //if (init_result) MODebug2->Push(moText("Initializing GStreamer:OK "));
1557 
 1558 //analogous to FilterGraph: two parameters are used to create the element: playbin
1559 //playbin
1560 //player
1561  MODebug2->Message( moText("creating pipeline"));
1562  m_pGstPipeline = gst_pipeline_new ("pipeline");
1563 
 1564  //look up a filter type: factory = gst_element_factory_find ("fakesrc");
 1565  //create it: gst_element_factory_make ( factory, "player");
 1566  //or gst_element_factory_make ("playbin", "player");
 1567  //read a property value: g_object_get (G_OBJECT (element), "name", &name, NULL);
1568 
1569  MODebug2->Message( moText("creating bus interface"));
1570  m_pGstBus = gst_pipeline_get_bus (GST_PIPELINE (m_pGstPipeline));
1571  m_BusWatchId = gst_bus_add_watch ( (GstBus*)m_pGstBus, bus_call, this );
1572  gst_object_unref (m_pGstBus);
1573  m_pGstBus = NULL;
1574 
1575 /*
1576  GMainLoop *loop = g_main_loop_new( NULL, FALSE);
1577  m_pGMainLoop = (moGMainLoop*) loop;
1578  if (loop) {
1579  m_pGMainContext = (moGMainContext*) g_main_loop_get_context( loop );
1580  }
1581  */
1582  m_pGMainContext = (moGMainContext*) g_main_context_default();
 1583  //end of initialization
1584 
1585 /*
1586  m_pGstPipeline = gst_element_factory_make ("playbin", "play");
1587  g_object_set (G_OBJECT (m_pGstPipeline), "uri", "file:///home/fabri/plasma.mpg", NULL);
1588 
1589  m_pGstBus = gst_pipeline_get_bus (GST_PIPELINE (m_pGstPipeline));
1590  gst_bus_add_watch (m_pGstBus, bus_call, loop);
1591  gst_object_unref (m_pGstBus);
1592 
1593 
1594  CheckState( gst_element_set_state (m_pGstPipeline, GST_STATE_PAUSED), true );
1595 */
1596  /* now run */
1597 
1598  //g_main_loop_run (moGsGraph::loop);
1599  MODebug2->Message( moText("moGsGraph::Init result:") + moText(((m_pGstPipeline!=NULL) ? "success" : "failure")) );
1600  m_bInitialized = m_pGstPipeline!=NULL;
1601  return (m_bInitialized);
1602 }
1603 
1604 
1605 bool
 1606 moGsGraph::FinishGraph() {
 1607 
1608  if (IsRunning()) {
1609  Stop();
1610  }
1611 
1612  if (m_BusWatchId!=0) {
1613  if (!g_source_remove(m_BusWatchId)) {
1614  MODebug2->Error(moText("Error releasing bus call watch:") + IntToStr(m_BusWatchId));
1615  } else m_BusWatchId = 0;
1616  }
1617 
1618 
1619  if (m_pGMainLoop) {
1620 
1621  g_main_loop_quit( (GMainLoop*) m_pGMainLoop );
1622  g_main_loop_unref( (GMainLoop*) m_pGMainLoop);
1623 
1624  m_pGMainLoop = NULL;
1625  m_pGMainContext = NULL;
1626  }
1627 
1628  if (m_pColorSpace) {
1629 #ifndef GSTVERSION
1630  GstPad* srcRGB = gst_element_get_pad ( (GstElement*)m_pColorSpace, "src");
1631  if (srcRGB && cb_have_data_handler_id) gst_pad_remove_buffer_probe ( srcRGB, cb_have_data_handler_id );
1632 #endif
1634  }
1635 
1636  if (m_pColorSpaceInterlace) {
1637 #ifndef GSTVERSION
1638  GstPad* srcRGB = gst_element_get_pad ( (GstElement*)m_pColorSpaceInterlace, "src");
1639  if (srcRGB && cb_have_data_handler_id) gst_pad_remove_buffer_probe ( srcRGB, cb_have_data_handler_id );
1640 #endif
1642  }
1643 
1644 
1645  if (m_pFileSource) {
1646  //gst_object_unref( (GstElement*) m_pFileSource);
1647  m_pFileSource = NULL;
1648  }
1649 
1650  if (m_pJpegDecode) {
1651  //gst_object_unref( (GstElement*) m_pJpegDecode);
1652  m_pJpegDecode = NULL;
1653  }
1654 
1655  if (m_pMultipartDemux) {
1656  //gst_object_unref( (GstElement*) m_pMultipartDemux);
1657  m_pMultipartDemux = NULL;
1658  }
1659 
1660  if (m_pHTTPSource) {
1661  //gst_object_unref( (GstElement*) m_pHTTPSource);
1662  m_pHTTPSource = NULL;
1663  }
1664 
1665  if (m_pRTSPDepay) {
1666  //gst_object_unref( (GstElement*) m_pRTSPDepay);
1667  m_pRTSPDepay = NULL;
1668  }
1669 
1670  if (m_pRTSPSource) {
1671  //gst_object_unref( (GstElement*) m_pRTSPSource);
1672  m_pRTSPSource = NULL;
1673  }
1674 
1676  if (m_pFinalSource) {
1677  m_pFinalSource = NULL;
1678  }
1679 
1680  if (m_pColorSpace) {
1681  //gst_object_unref( (GstElement*) m_pColorSpace);
1682  m_pColorSpace = NULL;
1683  }
1684 
1685  if (m_pColorSpaceInterlace) {
1686  //gst_object_unref( (GstElement*) m_pColorSpaceInterlace);
1687  m_pColorSpaceInterlace = NULL;
1688  }
1689 
1690  if (m_pCapsFilter) {
1691  //gst_object_unref( (GstElement*) m_pCapsFilter);
1692  m_pCapsFilter = NULL;
1693  }
1694 
1695  if (m_pDecoderBin) {
1696  if (g_signal_handler_is_connected((GstElement*)m_pDecoderBin, signal_newpad_id))
1697  g_signal_handler_disconnect ( (GstElement*)m_pDecoderBin, signal_newpad_id );
1698  signal_newpad_id = 0;
1699  //gst_object_unref( (GstElement*) m_pDecoderBin);
1700  m_pDecoderBin = NULL;
1701  }
1702 
1703  if (m_pFakeSink) {
1704  //gst_object_unref( (GstElement*) m_pFakeSink);
1705  m_pFakeSink = NULL;
1706  }
1707 
1708  if (m_pAudioConverter) {
1709  //gst_object_unref( (GstElement*) m_pAudioConverter);
1710  m_pAudioConverter = NULL;
1711  }
1712 
1713  if (m_pAudioSink) {
1714  //gst_object_unref( (GstElement*) m_pAudioSink);
1715  m_pAudioSink = NULL;
1716  }
1717 
1718  if (m_pAudioPad) {
1719  //gst_object_unref( (GstPad*) m_pAudioPad);
1720  m_pAudioPad = NULL;
1721  }
1722 
1723  if (m_pVideoPad) {
1724  //gst_object_unref( (GstPad*) m_pVideoPad);
1725  m_pVideoPad = NULL;
1726  }
1727 
1728  if (m_pFakeSource) {
1729  if (g_signal_handler_is_connected((GstElement*)m_pFakeSource, signal_handoff_id))
1730  g_signal_handler_disconnect ( (GstElement*)m_pFakeSource, signal_handoff_id );
1731  signal_handoff_id = 0;
1732  //gst_object_unref( (GstElement*) m_pFakeSource);
1733  m_pFakeSource = NULL;
1734  }
1735 
1736  if (m_pFileSink) {
1737  //gst_object_unref( (GstElement*) m_pFileSink);
1738  m_pFileSink = NULL;
1739  }
1740 
1741  if (m_pGstBus) {
1742  //gst_object_unref( (GstElement*) m_pGstBus);
1743  m_pGstBus = NULL;
1744  }
1745 
1746  if (m_pVideoDeinterlace) {
1747  gst_object_unref( (GstElement*) m_pVideoDeinterlace);
1748  m_pVideoDeinterlace = NULL;
1749  }
1750 
1751  if (m_pVideoScale) {
1752  //gst_object_unref( (GstElement*) m_pVideoScale);
1753  m_pVideoScale = NULL;
1754  }
1755 
1757  if (m_pGstPipeline) {
1758  gst_object_unref( (GstElement*) m_pGstPipeline);
1759  m_pGstPipeline = NULL;
1760  }
1761 
1762 
1763  return false;
1764 }
1765 
1766 bool
 1767 moGsGraph::IsEOS() {
 1768  return m_bEOS;
1769 }
1770 
1771 void
1772 moGsGraph::SetEOS(bool iseos) {
1773  m_bEOS = iseos;
1774 }
1775 
1776 
1777 
1778 //FILTER METHODS
1779 bool
 1780 moGsGraph::SetCaptureDevice( moText deviceport, MOint idevice ) {
 1781 
1783  deviceport = "";
1784  idevice = 0;
1785  return false;
1786 }
1787 
1788 
1789 void
1790 moGsGraph::CopyVideoFrame( void* bufferdst, int size ) {
1791 
1792  //int ttid = m_pDirectorCore->GetResourceManager()->GetTextureMan()->GetTextureMOId( moText("preview_texture"), false);
1793  if (m_pBucketsPool) {
1794  moBucket* pBucket = m_pBucketsPool->RetreiveBucket();
1795 
1796  if (pBucket) {
1797  void* pbuf = pBucket->GetBuffer();
1798 
1799  pBucket->Lock();
1800  memcpy( bufferdst, (void*)pbuf, size );
1801  pBucket->Unlock();
1802  m_pBucketsPool->DestroyRetreivedBucket();
1803 
1804  }
1805 
1806  }
1807 
1808 }
1809 
1820 bool
 1821 moGsGraph::BuildLiveGraph( moBucketsPool *pBucketsPool, moCaptureDevice &p_capdev ) {
 1822 
1823  return BuildLiveWebcamGraph( pBucketsPool, p_capdev );
1824 }
1825 
 1840 bool
 1841 moGsGraph::BuildLiveStreamingGraph( moBucketsPool *pBucketsPool, moText p_location ) {
 1842 
 1843  p_location = "";
1844  pBucketsPool = NULL;
1845  return false;
1846 }
1847 
 1853 bool
 1854 moGsGraph::BuildRecordGraph( moText filename, moBucketsPool *pBucketsPool ) {
 1855 
 1856  m_pBucketsPool = pBucketsPool;
1857  bool link_result = false;
1858  /*
1859  bool b_sourceselect = false;
1860  bool b_forcevideoscale = false;
1861  bool b_forcevideoflip = false;
1862  */
1863  //gchar* checkval;
1864  bool res = false;
1865 
1866 
1867  if (filename.Length()>0)
1868  {
1869 
1870  m_pFakeSource = gst_element_factory_make ("fakesrc", "source");
1871 
1872  /* setup fake source */
1873  if (m_pFakeSource) {
1874  g_object_set (G_OBJECT (m_pFakeSource),
1875  "signal-handoffs", TRUE,
1876  "sizemax", 400 * 300 * 3,
1877  "silent", TRUE,
1878  "sync", TRUE,
1879  "num-buffers", 30*200,
1880  "sizetype", 2, NULL);
1881  #ifndef GSTVERSION
1882  signal_handoff_id = g_signal_connect (m_pFakeSource, "handoff", G_CALLBACK (cb_handoff), this);
1883  #endif
1884 
1885  res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pFakeSource );
1886  }
1887 
1888 
1889  m_pCapsFilter = gst_element_factory_make ("capsfilter", "filtsource");
1890  if (m_pCapsFilter) {
1891  g_object_set (G_OBJECT (m_pCapsFilter), "caps", gst_caps_new_simple ("video/x-raw-rgb",
1892  "width", G_TYPE_INT, 400,
1893  "height", G_TYPE_INT, 300,
1894  "framerate", GST_TYPE_FRACTION, 10, 1,
1895  "bpp", G_TYPE_INT, 24,
1896  "depth", G_TYPE_INT, 24,
1897  "red_mask",G_TYPE_INT, 255,
1898  "green_mask",G_TYPE_INT, 65280,
1899  "blue_mask",G_TYPE_INT, 16711680,
1900  "endianness", G_TYPE_INT, 4321,
1901  NULL), NULL);
1902  //depth=(int)24, red_mask=(int)16711680, green_mask=(int)65280, blue_mask=(int)255, endianness=(int)4321
1903  res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pCapsFilter );
1904  }
1905 
1906 
1907  m_pColorSpace = gst_element_factory_make (VIDEOCONVERT, "color");
1908  if (m_pColorSpace) {
1909  res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pColorSpace );
1910  }
1911 
1912  link_result = gst_element_link_many( (GstElement*) m_pFakeSource, (GstElement*) m_pCapsFilter, (GstElement*) m_pColorSpace, NULL );
1913 
1914  if (link_result) {
1915 
1916  m_pEncoder = gst_element_factory_make( "ffenc_mpeg1video", "encoder");
1917  if (m_pEncoder) {
1918  res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pEncoder );
1919  }
1920 
1921  m_pMultiplexer = gst_element_factory_make( "ffmux_mpeg", "multiplexer");
1922  if (m_pMultiplexer) {
1923  res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pMultiplexer );
1924  }
1925 
1926  m_pFileSink = gst_element_factory_make( "filesink", "filesink");
1927  if (m_pFileSink) {
1928  g_object_set (G_OBJECT (m_pFileSink), "location", (char*)filename, NULL);
1929  res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pFileSink );
1930  }
1931 
1932  link_result = gst_element_link_many( (GstElement*) m_pColorSpace, (GstElement*) m_pEncoder, (GstElement*) m_pMultiplexer, (GstElement*) m_pFileSink, NULL );
1933  //link_result = gst_element_link_many( (GstElement*) m_pColorSpace, (GstElement*) m_pEncoder, NULL );
1934  //link_result = gst_element_link_many( (GstElement*) m_pColorSpace, (GstElement*) m_pEncoder, (GstElement*) m_pMultiplexer, NULL );
1935 
1936  if (link_result) {
1937  //if (CheckState( gst_element_set_state ((GstElement*) m_pGstPipeline, GST_STATE_PLAYING), false /*SYNCRUNASLI*/ )) {
1938  gst_element_set_state ( (GstElement*) m_pGstPipeline, GST_STATE_PLAYING);
1939 
1940  return true;
1941  //}
1942  }
1943  } else return false;
1944 
1945  }
1946 
1947  return false;
1948 }
1949 
1950 
1951 bool
1953 
1955  moCaptureDevice pp = p_capdev;
1956  pBucketsPool = NULL;
1957  return true;
1958 }
1959 
1980 bool
 1981 moGsGraph::BuildLiveWebcamGraph( moBucketsPool *pBucketsPool, moCaptureDevice &p_capdev ) {
 1982 
1983  m_pBucketsPool = pBucketsPool;
1984  GstCaps *caps = NULL;
1985  bool link_result = false;
1986 
1987  bool b_sourceselect = false;
1988  bool b_forcevideoscale = false;
1989  bool b_forcevideoflip = false;
1990 
1991  bool b_forcevideointerlace = false;
1992 
1993  //gchar* checkval;
1994  bool res = false;
1995  //GstPadLinkReturn ret_padlink;
1996 
1997  moGstElement* m_pColorSpaceSource = NULL;
1998 
1999  moGstElement* m_pCapsFilterSource = NULL;
2000  moGstElement* m_pCapsFilter2 = NULL;
2001 
2002  moText labelname;
2003  moText devicename;
2004  MOint p_sourcewidth;
2005  MOint p_sourceheight;
2006  MOint p_sourcebpp;
2007  MOint p_forcewidth;
2008  MOint p_forceheight;
2009  MOint p_forceflipH;
2010  MOint p_forceflipV;
2011  moText colormode;
2012 
2013 
2014  labelname = p_capdev.GetLabelName();
2015 
2016  devicename = p_capdev.GetName();
2017  switch( p_capdev.GetVideoFormat().m_ColorMode) {
2018  case YUV:
2019  colormode = moText("video/x-raw-yuv");
2020  break;
2021  case RGB:
2022  colormode = moText("video/x-raw-rgb");
2023  break;
2024  default:
2025  colormode = "";
2026  break;
2027  };
2028  p_sourcewidth = p_capdev.GetSourceWidth();
2029  p_sourceheight = p_capdev.GetSourceHeight();
2030  p_sourcebpp = p_capdev.GetSourceBpp();
2031 
2032  p_forcewidth = p_capdev.GetVideoFormat().m_Width;
2033  p_forceheight = p_capdev.GetVideoFormat().m_Height;
2034  p_forceflipH = p_capdev.GetSourceFlipH();
2035  p_forceflipV = p_capdev.GetSourceFlipV();
2036 
2037  if (p_forcewidth!=0 || p_forceheight!=0) {
2038  b_forcevideoscale = true;
2039  }
2040 
2041  if (p_forceflipH!=0 || p_forceflipV!=0) {
2042  b_forcevideoflip = true;
2043  }
2044 
2045  if (p_sourcewidth!=0 || p_sourceheight!=0) {
2046  b_sourceselect = true;
2047  }
2048 
2049  if (devicename.Length()>0)
2050  {
2051 
2052  std::string dname;
2053 
2054  dname = devicename;
2055 
2056  if (labelname==moText("RTSP")) {
2057 
2058  m_pRTSPSource = gst_element_factory_make ("rtspsrc", "source");
2059  m_pRTSPDepay = gst_element_factory_make ("rtpmp4vdepay", "depay");
2060 
2061  if (m_pRTSPDepay) {
2062  m_pRTSPDepaySink = gst_element_get_static_pad ( (GstElement*)m_pRTSPDepay, "sink" );
2063 #ifndef GSTVERSION
2064 signal_rtsppad_added_id = g_signal_connect (m_pRTSPSource, "pad-added", G_CALLBACK (on_rtsppadd_added), (gpointer)this);
2065 #endif
2066  }
2067  } else if (labelname==moText("HTTP") || dname.find("http")==0 ) {
2068  m_pHTTPSource = gst_element_factory_make ("souphttpsrc", "source");
2069  //needed for decodebin2 TODO: check this in gstreamer 1.0
2070  //m_pMultipartDemux = gst_element_factory_make ("multipartdemux", "demux");
2071  if ( m_pHTTPSource && m_pMultipartDemux ) {
2072  //signal_rtsppad_added_id = g_signal_connect ( m_pMultipartDemux, "pad-added", G_CALLBACK (on_rtsppadd_added), (gpointer)this);
2073  }
2074  }
2075  else {
2076 
2077  #ifdef MO_WIN32
2078  #ifdef GSTVERSION
2079  m_pFileSource = gst_element_factory_make ("ksvideosrc", "source");
2080  #else
2081  m_pFileSource = gst_element_factory_make ("dshowvideosrc", "source");
2082  #endif
2083  #else
2084  #ifdef GSTVERSION
2085  #ifdef MO_MACOSX
2086  m_pFileSource = gst_element_factory_make ("wrappercamerabinsrc", "source");
2087  cout << "wrappercamerabinsrc created!" << endl;
2088  #else
2089  if (devicename==moText("DV"))
2090  m_pFileSource = gst_element_factory_make ("dv1394src", "source");
2091  else {
2092  //m_pFileSource = gst_element_factory_make ("rpicamsrc", "source");
2093  //g_object_set (G_OBJECT (m_pFileSource), "preview", (bool)false, NULL);
2094  //g_object_set (G_OBJECT (m_pFileSource), "sensor-mode", (int)6, NULL);
2095  m_pFileSource = gst_element_factory_make ("v4l2src", "source");
2096  }
2097  #endif
2098 
2099  #else
2100  if (devicename==moText("DV"))
2101  m_pFileSource = gst_element_factory_make ("dv1394src", "source");
2102  else
2103  m_pFileSource = gst_element_factory_make ("v4l2src", "source");
2104  #endif
2105  #endif
2106 
2107  m_pFinalSource = m_pFileSource;
2108  }
2109 
2110  if (m_pRTSPDepay && m_pRTSPSource) {
2111  if (devicename.Length() > 0 && ( devicename!=moText("default")) ) {
2112  g_object_set (G_OBJECT (m_pRTSPSource), "location", (char*)devicename, NULL);
2113  g_object_set (G_OBJECT (m_pRTSPSource), "latency", (guint) 0, NULL);
2114  g_object_set (G_OBJECT (m_pRTSPSource), "debug", (gboolean) true, NULL);
2115 
2116  g_object_set (G_OBJECT (m_pRTSPSource), "protocols", (guint) 0x00000004, NULL);
2117 
2118  }
2119  res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pRTSPSource );
2120  if (res) {
2122  res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pRTSPDepay );
2123  if (res) {
2126 
2128  link_result = true;
2129 
2130 
2131  }
2132 
2133  }
2134 
2135  if (link_result) {
2136  m_pFinalSource = m_pRTSPDepay;
2137  } else {
2138  m_pFinalSource = NULL;
2139  }
2140 
2141  }
2142 
2144  if ( m_pHTTPSource /*&& m_pMultipartDemux*/ ) {
2145 
2146  g_object_set (G_OBJECT (m_pHTTPSource), "location", (char*)devicename, NULL);
2147  g_object_set (G_OBJECT (m_pHTTPSource), "automatic-redirect", TRUE, NULL);
2148 
2149  //g_object_set (G_OBJECT (m_pRTSPSource), "latency", (guint) 0, NULL);
2150  //g_object_set (G_OBJECT (m_pRTSPSource), "debug", (gboolean) true, NULL);
2151  //g_object_set (G_OBJECT (m_pRTSPSource), "protocols", (guint) 0x00000004, NULL);
2152 
2153 
2154  res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pHTTPSource );
2155  //needed for decoderbin2 (version 2) not for version 1
2156  //res = res && gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pMultipartDemux );
2157  link_result = false;
2158  if (res) {
2159  //link_result = gst_element_link_many( (GstElement*) m_pHTTPSource, (GstElement*) m_pMultipartDemux, NULL );
2160  link_result = true;
2161  }
2162 
2163  if ( link_result ) {
2164  //m_pFinalSource = m_pMultipartDemux;
2165  m_pFinalSource = m_pHTTPSource;
2166  m_pDecoderBin = gst_element_factory_make ( "decodebin", "decoder");
2167  } else {
2168  MODebug2->Error(moText("moGsGraph::BuildLiveWebcamGraph > SOUP HTTP source failed linking with MultipartDemux"));
2169  m_pFinalSource = NULL;
2170  }
2171 
2172  }
2173 
2174 
2176  if (m_pFileSource) {
2177  #ifdef MO_WIN32
2178  devicename.ToLower();
2179  if (devicename.Length() > 0 && ( devicename!=moText("default")) ) {
2180  g_object_set (G_OBJECT (m_pFileSource), "device-name", (char*)devicename, NULL);
2181  }
2182  #else
2183  if (devicename==moText("DV") ) {
2184  g_object_set (G_OBJECT (m_pFileSource), "port", 0, NULL);
2185  } else {
2186  devicename.ToLower();
2187  if ( devicename.Length() > 0 && ( devicename!=moText("default") ) ) {
2188  if (devicename.Find( "/dev/" )==0 ) {
2189  g_object_set (G_OBJECT (m_pFileSource), "device", (char*)devicename, NULL);
2190  } else {
2191  g_object_set (G_OBJECT (m_pFileSource), "device-name", (char*)devicename, NULL);
2192  }
2193  }
2194  }
2195  #endif
2196 
2197  res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pFileSource );
2198  MODebug2->Message( moText("filesrc created! > devicename: ") + (moText)devicename );
2199  m_pFinalSource = m_pFileSource;
2200  }
2201 
2202  if (m_pFinalSource) {
2203  //g_object_get (G_OBJECT (m_pFileSource), "location", &checkval, NULL);
2204  //GstElement *filter = gst_element_factory_make ("capsfilter", "filter");
2205  //g_object_set (G_OBJECT (m_pFileSource), "pattern", GST_VIDEO_TEST_SRC_SNOW, NULL);
2206  //res = gst_pad_set_caps( gst_element_get_pad( m_pFileSource, "src" ), NULL);
2207 
2208  GstIterator* iterator = NULL;
2209  iterator = gst_element_iterate_src_pads( (GstElement*) m_pFinalSource );
2210 
2211 #ifndef GSTVERSION
2212  gpointer item;
2213 #else
2214  GValue item = G_VALUE_INIT;
2215 #endif
2216  GstPad* srcpad = NULL;
2217  GstCaps* itemcaps = NULL;
2218  GstCaps* capstpl = NULL;
2219  GstCaps* capsQuery = NULL;
2220  GstPad* peerPad = NULL;
2221 
2222 
2223  //GstPad* sinkpad = NULL;
2224 
2225  moText padname;
2226  moText icapsstr;
2227 
2228  bool done = FALSE;
2229  while (!done) {
2230 #ifndef GSTVERSION
2231  switch (gst_iterator_next (iterator, &item)) {
2232 #else
2233  switch (gst_iterator_next (iterator, &item)) {
2234 #endif
2235  case GST_ITERATOR_OK:
2236  //... use/change item here...
2237 #ifndef GSTVERSION
2238  srcpad = (GstPad*)item;
2239 #else
2240  srcpad = (GstPad*)g_value_dup_object (&item);
2241 #endif
2242  padname = gst_object_get_name((GstObject*) srcpad );
2243 
2244  MODebug2->Message( moText("filesrc src pad: checking caps: ") + (moText)padname );
2245 
2246 #ifndef GSTVERSION
2247  itemcaps = gst_pad_get_caps( srcpad );
2248 #else
2249  itemcaps = gst_pad_get_current_caps( srcpad );
2250  capstpl = gst_pad_get_pad_template_caps( srcpad );
2251  capsQuery = gst_pad_query_caps( srcpad, NULL );
2252  peerPad = gst_pad_get_peer( srcpad );
2253  //if (peerPad==NULL)
2254 
2255  //gst_pad_peer_query_caps()
2256 #endif
2257 
2258  if (capsQuery) {
2259 
2260  icapsstr = moText( gst_caps_to_string(capsQuery) );
2261  MODebug2->Message(icapsstr);
2262  }
2263  //gst_object_unref (item);
2264 #ifdef GSTVERSION
2265  g_value_reset (&item);
2266 #endif
2267  break;
2268  case GST_ITERATOR_RESYNC:
2269  //...rollback changes to items...
2270  gst_iterator_resync (iterator);
2271  break;
2272  case GST_ITERATOR_ERROR:
2273  //...wrong parameters were given...
2274  done = TRUE;
2275  break;
2276  case GST_ITERATOR_DONE:
2277  done = TRUE;
2278  break;
2279  }
2280  }
2281  gst_iterator_free (iterator);
2282 
2283  //queue = gst_element_factory_make("queue", "vqueue");
2284 //b_sourceselect = true;
2285 //colormode = "";
2286 
2287  if (b_sourceselect) {
2288  #ifdef MO_WIN32
2289  #ifdef GSTVERSION
2290  b_sourceselect = false;
2291  #endif // GSTVERSION
2292  #endif // WIN32
2293  }
2294 
2295  if (b_sourceselect) {
2296  MODebug2->Message(moText("moGsGraph:: sourceselect:") + (moText)colormode
2297  + moText(" ") + IntToStr(p_sourcewidth)
2298  + moText("X") + IntToStr(p_sourceheight)
2299  + moText(" bpp:") + IntToStr(p_sourcebpp));
2300  m_pCapsFilterSource = gst_element_factory_make ("capsfilter", "filtsource");
2301 
2302  if (m_pCapsFilterSource) {
2308 #ifndef GSTVERSION
2309  MODebug2->Message("colormode: "+ colormode );
2310  if (colormode=="") colormode = "video/x-raw-yuv";
2311  //if (colormode=="") colormode = "video/x-raw-rgb";
2312  g_object_set (G_OBJECT (m_pCapsFilterSource), "caps", gst_caps_new_simple ( colormode,
2313  "width", G_TYPE_INT, p_sourcewidth,
2314  "height", G_TYPE_INT, p_sourceheight,
2315  "depth", G_TYPE_INT, 24,
2316  "red_mask",G_TYPE_INT, 16711680,
2317  "green_mask",G_TYPE_INT, 65280,
2318  "blue_mask",G_TYPE_INT, 255,
2319  NULL), NULL);
2320 #else
2321 //
2322  moText colormodef = "";
2323 
2324  int opt_framerate = 15;
2325  if (colormode=="") {
2326  colormode = "video/x-raw";
2327  /*
2328  colormodef = "BGR";
2329  moText fullf = colormode+ ","+ colormodef;
2330  MODebug2->Message("moGsGraph::BuildLiveWebcamGraph > p_sourcewidth:" + fullf );
2331 
2332  g_object_set (G_OBJECT (m_pCapsFilterSource), "caps", gst_caps_new_simple ( colormode,
2333  "format", G_TYPE_STRING, (char*)colormodef,
2334  "width", G_TYPE_INT, p_sourcewidth,
2335  "height", G_TYPE_INT, p_sourceheight,
2336  "framerate", GST_TYPE_FRACTION, opt_framerate, 1,
2337  NULL), NULL);
2338  */
2339  //colormodef = "UYVY";
2340  colormodef = "RGB";
2341  moText fullf = colormode+ ","+ colormodef;
2342  MODebug2->Message( moText("moGsGraph::BuildLiveWebcamGraph > (colormode, format): (") + fullf + moText(")") );
2343  //opt_framerate = 30;
2344  g_object_set (G_OBJECT (m_pCapsFilterSource), "caps", gst_caps_new_simple ( colormode,
2345  "format", G_TYPE_STRING, (char*)colormodef,
2346  "width", G_TYPE_INT, p_sourcewidth,
2347  "height", G_TYPE_INT, p_sourceheight,
2348  "framerate", GST_TYPE_FRACTION, opt_framerate, 1,
2349  NULL), NULL);
2350  } else {
2351 
2352  colormode="video/x-raw-yuv";
2353 
2354  if (colormode=="video/x-raw-rgb") {
2355  colormodef = "RGB";
2356  } else if (colormode=="video/x-raw-yuv") {
2357  colormodef = "YUV";
2358  }
2359 
2360  colormode="video/x-raw";
2361 
2362  g_object_set (G_OBJECT (m_pCapsFilterSource), "caps", gst_caps_new_simple ( colormode,
2363  //"format", G_TYPE_STRING, "I420",
2364  /*"format", G_TYPE_STRING, (char*)colormodef,*/
2365  "width", G_TYPE_INT, p_sourcewidth,
2366  "height", G_TYPE_INT, p_sourceheight,
2367  NULL), NULL);
2368 
2369  }
2370 
2371 
2372 #endif
2373  //depth=(int)24, red_mask=(int)16711680, green_mask=(int)65280, blue_mask=(int)255, endianness=(int)4321
2374  /*
2375  "bpp", G_TYPE_INT, p_sourcebpp,
2376  "depth", G_TYPE_INT, 24,
2377  "red_mask",G_TYPE_INT, 255,
2378  "green_mask",G_TYPE_INT, 65280,
2379  "blue_mask",G_TYPE_INT, 16711680,
2380  "endianness", G_TYPE_INT, 4321
2381  */
2382  res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pCapsFilterSource );
2383  if (res) { MODebug2->Message(moText("moGsGraph:: added capsfilter source!") ) ; }
 2384  else MODebug2->Error(moText("moGsGraph:: error adding capsfilter source!"));
2385  }
2386  }
2387 
2388  b_forcevideoscale = false;
2389  if (b_forcevideoscale) {
2390 
2391  m_pVideoScale = gst_element_factory_make ("videoscale", "scale");
2392  if (m_pVideoScale) {
2393  int method = 0;
2394  colormode = "video/x-raw";
2395  MODebug2->Message(moText("moGsGraph:: creating videoscale!") ) ;
2396 #ifndef GSTVERSION
2397  g_object_set (G_OBJECT (m_pVideoScale), "method", &method, NULL);
2398 #endif
2399  res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pVideoScale );
2400 
2401  m_pCapsFilter2 = gst_element_factory_make ("capsfilter", "filt2");
2402  if (m_pCapsFilter2) {
2403  if (b_forcevideoscale) {
2404  g_object_set (G_OBJECT (m_pCapsFilter2), "caps", gst_caps_new_simple ( colormode,
2405  "width", G_TYPE_INT, p_forcewidth,
2406  "height", G_TYPE_INT, p_forceheight,
2407  NULL), NULL);
2408  } else {
2409  g_object_set (G_OBJECT (m_pCapsFilter2), "caps", gst_caps_new_simple ( colormode,
2410  "width", G_TYPE_INT, 240,
2411  "height", G_TYPE_INT, 160,
2412  NULL), NULL);
2413  }
2414  //depth=(int)24, red_mask=(int)16711680, green_mask=(int)65280, blue_mask=(int)255, endianness=(int)4321
2415  res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pCapsFilter2 );
2416  }
2417 
2418 
2419  }
2420  }
2421 
2422  b_forcevideointerlace = false;
2423  if (b_forcevideointerlace) {
2424  m_pColorSpaceInterlace = gst_element_factory_make (VIDEOCONVERT, "colordeinterlace");
2425  if (m_pColorSpaceInterlace) {
2426  MODebug2->Message(moText("moGsGraph:: created videoconvert before deinterlace!") ) ;
2427  res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pColorSpaceInterlace );
2428  }
2429 
2430 
2431  m_pVideoDeinterlace = gst_element_factory_make ("ffdeinterlace", "deinterlace");
2432  if (m_pVideoDeinterlace) {
2433  //int tff = 2;//bottom field first
2434  //g_object_set (G_OBJECT (m_pVideoDeinterlace), "tff", &tff, NULL);
2435  MODebug2->Message(moText("moGsGraph:: created ffdeinterlace!") ) ;
2436  res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pVideoDeinterlace );
2437  }
2438  }
2439 
2440  m_pColorSpace = gst_element_factory_make (VIDEOCONVERT, "color");
2441  if (m_pColorSpace) {
2442  MODebug2->Message(moText("moGsGraph:: created videoconvert for final color!") ) ;
2443  res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pColorSpace );
2444  }
2445 
2446  m_pCapsFilter = gst_element_factory_make ("capsfilter", "filt");
2447  if (m_pCapsFilter) {
2448  MODebug2->Message(moText("moGsGraph:: created last capsfilter!") ) ;
2449 #ifndef GSTVERSION
2450  g_object_set (G_OBJECT (m_pCapsFilter), "caps", gst_caps_new_simple ("video/x-raw-rgb",
2451  "bpp", G_TYPE_INT, 24,
2452  "depth", G_TYPE_INT, 24,
2453  "red_mask",G_TYPE_INT, 255,
2454  "green_mask",G_TYPE_INT, 65280,
2455  "blue_mask",G_TYPE_INT, 16711680,
2456  "endianness", G_TYPE_INT, 4321,
2457  NULL), NULL);
2458  res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pCapsFilter );
2459 #else
2460  caps = gst_caps_new_simple ( "video/x-raw",
2461  "format", G_TYPE_STRING, "RGB",
2462  NULL);
2463  g_object_set (G_OBJECT (m_pCapsFilter), "caps", caps, NULL);
2464  res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pCapsFilter );
2465 
2466 #endif
2467  //depth=(int)24, red_mask=(int)16711680, green_mask=(int)65280, blue_mask=(int)255, endianness=(int)4321
2468 
2469  }
2470 
2471  //RetreivePads( m_pFileSource );
2472 /*
2473  m_pAudioConverter = gst_element_factory_make ("audioresample", "resample");
2474 
2475  if (m_pAudioConverter) {
2476  res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pAudioConverter );
2477  }
2478  */
2479 
2480 
2481  if (m_pDecoderBin==NULL) m_pDecoderBin = gst_element_factory_make ( DECODEBIN, "decoder");
2482  if (m_pDecoderBin) {
2483  MODebug2->Message(moText("moGsGraph:: created decoder bin! ") + DECODEBIN ) ;
2484  res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pDecoderBin );
2485 #ifndef GSTVERSION
2486  signal_newpad_id = g_signal_connect (m_pDecoderBin, "new-decoded-pad", G_CALLBACK (cb_newpad), (gpointer)this);
2487 #else
2488  signal_newpad_id = g_signal_connect (m_pDecoderBin, "pad-added", G_CALLBACK (cb_pad_added_new), (gpointer)this);
2489  MODebug2->Message( moText("moGsGraph:: added signal to Decoder Bin, \"pad-added\": ") + IntToStr(signal_newpad_id) ) ;
2490 #endif
2491 
2492 #ifndef GSTVERSION
2493  m_pFakeSink = gst_element_factory_make ("fakesink", "destout");
2494 #else
2495  cout << "creating FakeSink from appsink" << endl;
2496  m_pFakeSink = gst_element_factory_make ("appsink", "destout");
2497 #endif
2498  //RetreivePads( m_pFakeSink );
2499  if (m_pFakeSink) {
2500  MODebug2->Message(moText("moGsGraph:: created FakeSink! ") ) ;
2501 #ifdef GSTVERSION
2502  g_object_set (G_OBJECT (m_pFakeSink), "caps", caps, NULL);
2503  g_object_set (G_OBJECT (m_pFakeSink), "sync", false, NULL);
2504  g_object_set (G_OBJECT (m_pFakeSink), "drop", true, NULL);
2505 #endif
2506  res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pFakeSink );
2507 
2508 
2509  MODebug2->Message(moText("moGsGraph:: Try linkage!! sourceselect?: ") + IntToStr(b_sourceselect) ) ;
2510  if (b_sourceselect) {
2511  cout << "linking m_pFinalSource, m_pCapsFilterSource, m_pDecoderBin" << endl;
2512  link_result = gst_element_link_many( (GstElement*) m_pFinalSource, (GstElement*) m_pCapsFilterSource, (GstElement*) m_pDecoderBin, NULL );
2513 
2514  } else {
2515  cout << "linking m_pFinalSource, m_pDecoderBin" << endl;
2516  link_result = gst_element_link_many( (GstElement*) m_pFinalSource, (GstElement*) m_pDecoderBin, NULL );
2517  }
2518 
2519 
2520  if (link_result) {
2521  MODebug2->Message(moText("moGsGraph:: Source linkage ok! ") ) ;
2522  if (b_forcevideoscale) {
2523  cout << "linking forcing videoscale" << endl;
2524  if (b_forcevideointerlace)
2525  link_result = gst_element_link_many( (GstElement*) m_pVideoScale, (GstElement*)m_pCapsFilter2, (GstElement*) m_pColorSpaceInterlace, (GstElement*) m_pVideoDeinterlace, (GstElement*) m_pColorSpace, (GstElement*) m_pCapsFilter, (GstElement*) m_pFakeSink, NULL );
2526  else
2527  link_result = gst_element_link_many( (GstElement*) m_pVideoScale, (GstElement*)m_pCapsFilter2, (GstElement*) m_pColorSpace, (GstElement*) m_pCapsFilter, (GstElement*) m_pFakeSink, NULL );
2528 
2529  //old deinterlace
2530  //link_result = gst_element_link_many( (GstElement*) m_pVideoDeinterlace, (GstElement*) m_pVideoScale, (GstElement*)m_pCapsFilter2, (GstElement*) m_pColorSpace, (GstElement*) m_pCapsFilter, (GstElement*) m_pFakeSink, NULL );
2531  } else {
2532  cout << "linking no videoscale" << endl;
2533  //link_result = gst_element_link_many( (GstElement*) m_pVideoDeinterlace, (GstElement*) m_pColorSpace, (GstElement*) m_pCapsFilter, (GstElement*) m_pFakeSink, NULL );
2534  if (b_forcevideointerlace) {
2535  cout << "linking m_pColorSpaceInterlace, m_pVideoDeinterlace, m_pColorSpace, m_pCapsFilter, m_pFakeSink" << endl;
2536  link_result = gst_element_link_many( (GstElement*) m_pColorSpaceInterlace, (GstElement*) m_pVideoDeinterlace, (GstElement*)m_pColorSpace, (GstElement*) m_pCapsFilter, (GstElement*) m_pFakeSink, NULL );
2537  } else {
2538  cout << "linking m_pColorSpace, /*m_pCapsFilter*/, m_pFakeSink" << endl;
2539  link_result = gst_element_link_many(
2540  (GstElement*) m_pColorSpace,
2541 #ifndef GSTVERSION
2542  (GstElement*) m_pCapsFilter,
2543 #endif
2544  (GstElement*) m_pFakeSink, NULL );
2545 
2546 
2547  }
2548  //link_result = gst_element_link_filtered( (GstElement*) m_pColorSpace, (GstElement*) m_pFakeSink, NULL );
2549  //link_result = gst_element_link_many( (GstElement*) m_pColorSpace, (GstElement*) m_pCapsFilter, (GstElement*) m_pFakeSink, NULL );
2550  }
2551 
2552  if (link_result) {
2553 MODebug2->Message( moText("moGsGraph::BuildLiveWebcamGraph > play pipeline"));
2554  CheckState( gst_element_set_state ((GstElement*) m_pGstPipeline, GST_STATE_PLAYING), true /*SYNCRUNASLI*/ );
2555  //GetState();
2556 MODebug2->Message( moText("moGsGraph::BuildLiveWebcamGraph > GST_STATE_PLAYING > OK"));
2557 #ifdef GSTVERSION
2558  GstSample *sample;
2559  MODebug2->Message( moText("moGsGraph::BuildLiveWebcamGraph > gst_app_sink_pull_preroll for appsink"));
2560  //g_signal_emit_by_name ( m_pFakeSink, "pull-sample", &sample, NULL);
2561 
2562  sample = gst_app_sink_pull_preroll( (GstAppSink*) m_pFakeSink );
2563  if (sample) {
2564  MODebug2->Message( moText("moGsGraph::BuildLiveWebcamGraph > RECEIVED sample from gst_app_sink_pull_preroll!"));
2565  GstBuffer *Gbuffer;
2566  GstCaps *bcaps;
2567  GstStructure *bstr;
2568 
2573  bcaps = gst_sample_get_caps( sample );
2574  if (bcaps) {
2575  Gbuffer = gst_sample_get_buffer (sample);
2576  SetVideoFormat( bcaps, Gbuffer );
2577  gst_app_sink_set_emit_signals((GstAppSink*)m_pFakeSink, true);
2578  gst_app_sink_set_drop((GstAppSink*)m_pFakeSink, true);
2579  //g_object_set (G_OBJECT (m_pFakeSink), "sync", false, NULL);
2580  gst_app_sink_set_max_buffers((GstAppSink*)m_pFakeSink, 1);
2581  g_signal_connect( (GstElement*)m_pFakeSink, "new-sample", G_CALLBACK (appsink_new_sample), (gpointer)this );
2582  //gst_app_sink_set_callbacks( (GstAppSink*)m_pFakeSink, )
2583 
2584  }
2585  } else MODebug2->Error( moText("moGsGraph::BuildLiveWebcamGraph > NO sample from gst_app_sink_pull_preroll!"));
2586  MODebug2->Message( moText("moGsGraph::BuildLiveWebcamGraph > gst_app_sink_pull_preroll for appsink ended"));
2587 
2588 #else
2589  WaitForFormatDefinition( 1600 );
2590 #endif
2591 
 2592  MODebug2->Message( moText("moGsGraph::BuildLiveWebcamGraph > graph built"));
2593  //cout << "state gstreamer finish" << endl;
2594 
2595  //event_loop( (GstElement*) m_pGstPipeline, false, GST_STATE_PAUSED);
2596 
2597  return true;
2598 
2599  } else {
2600  MODebug2->Error(moText("moGsGraph::BuildLiveWebcamGraph > m_pColorSpace m_pCapsFilter m_pFakeSink linking failed"));
2601  event_loop( (GstElement*) m_pGstPipeline, false, GST_STATE_PAUSED);
2602  }
2603  } else {
2604  MODebug2->Error(moText("moGsGraph::BuildLiveWebcamGraph > src and decodebin linkage failed: ") + devicename );
2605  event_loop( (GstElement*) m_pGstPipeline, false, GST_STATE_PAUSED);
2606  }
2607 
2608  } else {
2609  MODebug2->Error(moText("moGsGraph::BuildLiveWebcamGraph > fakesink construction failed"));
2610  event_loop( (GstElement*) m_pGstPipeline, false, GST_STATE_PAUSED);
2611  }
2612  } else {
2613  MODebug2->Error(moText("moGsGraph::BuildLiveWebcamGraph > decodebin construction failed"));
2614  event_loop( (GstElement*) m_pGstPipeline, false, GST_STATE_PAUSED);
2615  }
2616  } else {
2617  MODebug2->Error(moText("moGsGraph::BuildLiveWebcamGraph > final source failed"));
2618  event_loop( (GstElement*) m_pGstPipeline, false, GST_STATE_PAUSED);
2619  }
2620  return false;
2621 
2622 
2623  }
2624 
2625  return true;
2626 }
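
// Illustrative sketch (hypothetical helper, not used by moGsGraph): the graph
// built above reduces to capture source -> (caps filter) -> decodebin ->
// videoconvert -> RGB caps -> appsink, followed by one preroll pull to learn
// the negotiated format. A condensed GStreamer 1.0 version of the same idea,
// assuming "v4l2src" as the capture element:
#ifdef GSTVERSION
static gboolean sketch_open_webcam_rgb( const char *device )
{
  GstElement *pipe = gst_pipeline_new ("webcam-sketch");
  GstElement *src  = gst_element_factory_make ("v4l2src",    "src");
  GstElement *conv = gst_element_factory_make (VIDEOCONVERT, "conv");
  GstElement *sink = gst_element_factory_make ("appsink",    "sink");
  if (!pipe || !src || !conv || !sink) return FALSE;

  if (device) g_object_set (G_OBJECT (src), "device", device, NULL);

  // ask the converter/appsink pair for packed RGB, like m_pCapsFilter above
  GstCaps *rgb = gst_caps_new_simple ("video/x-raw",
                                      "format", G_TYPE_STRING, "RGB", NULL);
  g_object_set (G_OBJECT (sink), "caps", rgb, "sync", FALSE, "drop", TRUE, NULL);
  gst_caps_unref (rgb);

  gst_bin_add_many (GST_BIN (pipe), src, conv, sink, NULL);
  if (!gst_element_link_many (src, conv, sink, NULL)) return FALSE;

  gst_element_set_state (pipe, GST_STATE_PLAYING);

  // blocks until the first buffer is prerolled, as the code above does
  GstSample *sample = gst_app_sink_pull_preroll (GST_APP_SINK (sink));
  if (!sample) return FALSE;

  gchar *desc = gst_caps_to_string (gst_sample_get_caps (sample));
  g_print ("webcam caps: %s\n", desc);
  g_free (desc);
  gst_sample_unref (sample);
  return TRUE;
}
#endif // GSTVERSION
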
2627 
2628 bool moGsGraph::BuildLiveQTVideoGraph( moText filename , moBucketsPool *pBucketsPool ) {
2629 
2630  return BuildLiveVideoGraph( filename, pBucketsPool );
2631 
2632 }
2633 
2634 
2635 void
 2636 moGsGraph::RetreivePads( moGstElement* FilterElement ) {
 2637 
2638  GstIterator* piter;
2639  GstPad* ppad;
2640  gchar* nname;
2641 #ifndef GSTVERSION
2642  gpointer ppointer;
2643 #else
2644  GValue gvalue = G_VALUE_INIT;
2645 #endif
2646  bool done;
2647  bool res = false;
2648 
2649  piter = gst_element_iterate_pads( (GstElement*)FilterElement );
2650 
2651  done = FALSE;
2652  while (!done) {
2653 #ifndef GSTVERSION
2654  switch (gst_iterator_next (piter, &ppointer)) {
2655 #else
2656  switch (gst_iterator_next (piter, &gvalue)) {
2657 #endif
2658  case GST_ITERATOR_OK:
2659  //... use/change item here...
2660 #ifndef GSTVERSION
2661  ppad = (GstPad*) ppointer;
2662 #else
2663  ppad = (GstPad*) g_value_dup_object( &gvalue );
2664 #endif
2665  nname = gst_pad_get_name(ppad);
2666  res = gst_pad_is_active(ppad);
2667  res = gst_pad_is_linked(ppad);
2668  res = gst_pad_is_blocking(ppad);
2669 #ifndef GSTVERSION
2670  gst_object_unref (ppointer);
2671 #else
2672  g_value_reset( &gvalue );
2673 #endif
2674  break;
2675 
2676  case GST_ITERATOR_RESYNC:
2677  //...rollback changes to items...
2678  gst_iterator_resync (piter);
2679  break;
2680 
2681  case GST_ITERATOR_ERROR:
 2682  //...wrong parameters were given...
2683  done = TRUE;
2684  break;
2685 
2686  case GST_ITERATOR_DONE:
2687  done = TRUE;
2688  break;
2689  }
2690  }
 2691  gst_iterator_free (piter); done = FALSE;
2692 
2693  return;
2694 }
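
// Illustrative sketch (hypothetical helper, not used by moGsGraph): the
// GStreamer 1.0 branch of RetreivePads walks the pads through a GValue rather
// than a raw gpointer. The essential loop on its own:
#ifdef GSTVERSION
static void sketch_list_pads( GstElement *element )
{
  GstIterator *it   = gst_element_iterate_pads (element);
  GValue       item = G_VALUE_INIT;
  gboolean     done = FALSE;

  while (!done) {
    switch (gst_iterator_next (it, &item)) {
      case GST_ITERATOR_OK: {
        GstPad *pad  = GST_PAD (g_value_get_object (&item)); // borrowed ref
        gchar  *name = gst_pad_get_name (pad);
        g_print ("pad %s active=%d linked=%d\n", name,
                 gst_pad_is_active (pad), gst_pad_is_linked (pad));
        g_free (name);
        g_value_reset (&item);
        break;
      }
      case GST_ITERATOR_RESYNC:
        gst_iterator_resync (it);
        break;
      default:               // GST_ITERATOR_ERROR or GST_ITERATOR_DONE
        done = TRUE;
        break;
    }
  }
  g_value_unset (&item);
  gst_iterator_free (it);
}
#endif // GSTVERSION
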
2695 /*
2696 bool
2697 moGsGraph::BuildTestGraph( moBucketsPool *pBucketsPool ) {
2698 typedef enum {
2699  GST_VIDEO_TEST_SRC_SMPTE,
2700  GST_VIDEO_TEST_SRC_SNOW,
2701  GST_VIDEO_TEST_SRC_BLACK,
2702  GST_VIDEO_TEST_SRC_WHITE,
2703  GST_VIDEO_TEST_SRC_RED,
2704  GST_VIDEO_TEST_SRC_GREEN,
2705  GST_VIDEO_TEST_SRC_BLUE,
2706  GST_VIDEO_TEST_SRC_CHECKERS1,
2707  GST_VIDEO_TEST_SRC_CHECKERS2,
2708  GST_VIDEO_TEST_SRC_CHECKERS4,
2709  GST_VIDEO_TEST_SRC_CHECKERS8,
2710  GST_VIDEO_TEST_SRC_CIRCULAR,
2711  GST_VIDEO_TEST_SRC_BLINK
2712 } GstVideoTestSrcPattern;
2713 
2714 }*/
2715 
2716 void
 2717 moGsGraph::WaitForFormatDefinition( MOulong timeout ) {
 2718 
2719  MOulong time0 = moGetTicksAbsolute();
2720  MOulong time1 = time0;
2721 
2722  //cout << "waiting for format definition..." << timeout << endl;
2723 
 2724  while((time1 - time0) < timeout) {
 2725  if (!m_VideoFormat.m_WaitForFormat) {
 2726  return;
 2727  }
2728  time1 = moGetTicksAbsolute();
2729  //cout << (time1 - time0) << endl;
2730  continue;
2731  }
2732  //cout << "elapsed:" << (time1 - time0) << "m_WaitForFormat:" << m_VideoFormat.m_WaitForFormat << "w:" << m_VideoFormat.m_Width << " x h:" << m_VideoFormat.m_Height << endl;
2733  MODebug2->Error("moGsGraph::WaitForFormatDefinition > time out !!! " + IntToStr(timeout) + " ms elapsed!");
2734 }
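
// Illustrative sketch (hypothetical helper, not used by moGsGraph):
// WaitForFormatDefinition is a plain polling loop, spinning until a flag set
// by the caps callback clears or the timeout expires. The same idea written
// against GLib's monotonic clock (microsecond resolution):
static gboolean sketch_wait_for_flag( volatile gboolean *still_waiting,
                                      gint64 timeout_ms )
{
  gint64 deadline = g_get_monotonic_time () + timeout_ms * 1000;
  while (g_get_monotonic_time () < deadline) {
    if (!*still_waiting) return TRUE;   // format arrived
    g_usleep (1000);                    // soften the busy-wait
  }
  return FALSE;                         // timed out
}
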
2735 
2736 
2738 
2739  bool link_result = false;
2740 // gchar* checkval;
2741  bool res = false;
2742 
2743  MODebug2->Push( moText("Building live sound:") + (moText)filename);
2744 
2745  moFile SoundFile( filename );
2746 
2747  if ( !SoundFile.Exists() ) return false;
2748 
2749  if (filename.Length()>0)
2750  {
2751 
2752  moText extension = filename;
2753  extension.Right(4);
2754 
2755  m_pFileSource = gst_element_factory_make ("filesrc", "source");
2756 
2757  if (m_pFileSource) {
2758 
2759  g_object_set (G_OBJECT (m_pFileSource), "location", (char*)filename/*("///home/fabri/jp5.avi")*/, NULL);
2760 
2761  res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pFileSource );
2762 
2763  m_pAudioConverter = NULL;
2764 /*
2765 
2766 
2767  if (m_pAudioConverter) {
2768  res = gst_bin_add (GST_BIN (m_pGstPipeline), m_pAudioConverter );
2769  }
2770 */
2771  if (extension==moText(".wav")) {
2772  m_pAudioConverter = gst_element_factory_make ("audioresample", "resample");
2773  // MODebug2->Push( "moGsGraph:: wav file" );
2774  } else {
2775  m_pAudioConverter = gst_element_factory_make ("audioconvert", "converter");
2776  }
2777 
2778  if (m_pAudioConverter) {
2779  res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pAudioConverter );
2780  }
2781 
2782  m_pAudioSink = gst_element_factory_make ("autoaudiosink", "audioout");
2783 
2784  if (m_pAudioSink) {
2785  res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pAudioSink );
2786  }
2787 
2792  m_pAudioSpeed = gst_element_factory_make ("speed", "speed");
2793 
2794  if (m_pAudioSpeed) {
2795  res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pAudioSpeed );
2796  }
2797 
2798  m_pAudioVolume = gst_element_factory_make ("volume", "volume");
2799 
2800  if (m_pAudioVolume) {
2801  res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pAudioVolume );
2802  }
2803 
2804  m_pAudioPanorama = gst_element_factory_make ("audiopanorama", "audiopanorama");
2805 
2806  if (m_pAudioPanorama) {
2807  res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pAudioPanorama );
2808  }
2809 
2810  m_pAudioConverter2 = gst_element_factory_make ("audioconvert", "audioconvert2");
2811 
2812  if (m_pAudioConverter2) {
2813  res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pAudioConverter2 );
2814  }
2815 
2816  m_pAudioConverter3 = gst_element_factory_make ("audioconvert", "audioconvert3");
2817 
2818  if (m_pAudioConverter3) {
2819  res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pAudioConverter3 );
2820  }
2821 
2822 /*
2823  m_pAudioEcho = gst_element_factory_make ("audioecho", "audioecho");
2824 
2825 
2826  if (m_pAudioEcho) {
2827  res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pAudioEcho );
2828  unsigned long long max_delay,delay;
2829  max_delay = 2000000000;
2830  delay = 0;
2831  float intensity = 0.0;
2832 
2833  g_object_set ( (GstElement*)m_pAudioEcho, "max-delay", max_delay, NULL);
2834  g_object_set ( (GstElement*)m_pAudioEcho, "delay", delay, NULL);
2835  g_object_set ( (GstElement*)m_pAudioEcho, "intensity", intensity, NULL);
2836  }
2837 */
2838 
2839  m_pAudioConverter4 = gst_element_factory_make ("audioconvert", "audioconvert4");
2840 
2841  if (m_pAudioConverter4) {
2842  res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pAudioConverter4 );
2843  }
2844 
2845  m_pDecoderBin = gst_element_factory_make ( DECODEBIN, "decoder");
2846  if (m_pDecoderBin) {
2847 #ifndef GSTVERSION
2848  signal_newpad_id = g_signal_connect ((GstElement*)m_pDecoderBin, "new-decoded-pad", G_CALLBACK (cb_newpad), (gpointer)this);
2849 #else
2850  signal_newpad_id = g_signal_connect ((GstElement*)m_pDecoderBin, "pad-added", G_CALLBACK (cb_pad_added_new), (gpointer)this);
2851 #endif
2852  res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pDecoderBin );
2853  }
2854 
2855 
2856  //signal_id = g_signal_connect (m_pWavParser, "new-decoded-pad", G_CALLBACK (cb_newpad), (gpointer)this);
2857  link_result = gst_element_link_many( (GstElement*)m_pFileSource, (GstElement*)m_pDecoderBin, NULL );
2858 
2859  if (link_result) {
2860  /*
2861  if (m_pAudioConverter) link_result = gst_element_link_many(
2862  (GstElement*)m_pAudioConverter,
2863  (GstElement*)m_pAudioSpeed,
2864  (GstElement*)m_pAudioConverter2,
2865  (GstElement*)m_pAudioPanorama,
2866  (GstElement*)m_pAudioConverter3,
2867  (GstElement*)m_pAudioEcho,
2868  (GstElement*)m_pAudioConverter4,
2869  (GstElement*)m_pAudioVolume,
2870  (GstElement*)m_pAudioSink,
2871  NULL
2872  );
2873  */
2874  if (m_pAudioConverter) link_result = gst_element_link_many(
2875  (GstElement*)m_pAudioConverter,
2876  (GstElement*)m_pAudioSpeed,
2877  (GstElement*)m_pAudioConverter2,
2878  (GstElement*)m_pAudioPanorama,
2879  (GstElement*)m_pAudioConverter3,
2880  (GstElement*)m_pAudioVolume,
2881  (GstElement*)m_pAudioConverter4,
2882  (GstElement*)m_pAudioSink,
2883  NULL
2884  );
2885  //else link_result = gst_element_link_many( (GstElement*)m_pAudioSink, NULL );
2886 
2887  if (link_result) {
2888 
2889  CheckState( gst_element_set_state ((GstElement*)m_pGstPipeline, GST_STATE_PAUSED), true /*SYNCRUNASLI*/ );
2890 
2891  //WaitForFormatDefinition( 1600 );
2892 
2893  cout << "state gstreamer finish" << endl;
2894 
2895  return true;
2896 
2897  } else {
2898  MODebug2->Error(moText("moGsGraph::error: m_pAudioConverter m_pAudioResample m_pAudioSink linking failed"));
2899  event_loop( (GstElement*)m_pGstPipeline, false, GST_STATE_PAUSED);
2900  }
2901  } else {
2902  MODebug2->Error(moText("moGsGraph::error: m_pFileSource m_pWavParser linking failed"));
2903  event_loop( (GstElement*)m_pGstPipeline, false, GST_STATE_PAUSED);
2904  }
2905 
2906  }
2907 
2908 
2909 
2910 
2911  }
2912 
2913  return false;
2914 }
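
// Illustrative sketch (hypothetical helpers, not used by moGsGraph): the sound
// graph above relies on decodebin's dynamic pads. The audio tail
// (audioconvert -> volume -> autoaudiosink) is linked statically and decodebin
// is attached to its head from the "pad-added" callback once the stream type
// is known. Reduced to its core:
static void sketch_on_pad_added( GstElement *decodebin, GstPad *new_pad,
                                 gpointer user_data )
{
  GstPad *sinkpad = gst_element_get_static_pad (GST_ELEMENT (user_data), "sink");
  if (!gst_pad_is_linked (sinkpad))
    gst_pad_link (new_pad, sinkpad);   // non-audio pads simply fail to link here
  gst_object_unref (sinkpad);
  (void) decodebin;
}

static GstElement* sketch_play_sound_file( const char *path )
{
  GstElement *pipe = gst_pipeline_new ("sound-sketch");
  GstElement *src  = gst_element_factory_make ("filesrc",       "src");
  GstElement *dec  = gst_element_factory_make (DECODEBIN,       "dec");
  GstElement *conv = gst_element_factory_make ("audioconvert",  "conv");
  GstElement *vol  = gst_element_factory_make ("volume",        "vol");
  GstElement *sink = gst_element_factory_make ("autoaudiosink", "sink");
  if (!pipe || !src || !dec || !conv || !vol || !sink) return NULL;

  g_object_set (G_OBJECT (src), "location", path, NULL);
  gst_bin_add_many (GST_BIN (pipe), src, dec, conv, vol, sink, NULL);

  gst_element_link (src, dec);                    // static link
  gst_element_link_many (conv, vol, sink, NULL);  // static audio tail
  g_signal_connect (dec, "pad-added",
                    G_CALLBACK (sketch_on_pad_added), conv);  // dynamic link

  gst_element_set_state (pipe, GST_STATE_PLAYING);
  return pipe;
}
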
2915 
2916 
2918 
2919  //BuildLock.Lock();
2920  bool res = false;
2921 
2922  if (m_pGstPipeline) {
2923  m_pAudioConverter = gst_element_factory_make ("audioconvert", "convert");
2924 
2925  if (m_pAudioConverter) {
2926  res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pAudioConverter );
2927  }
2928 
2929  m_pAudioVolume = gst_element_factory_make ("volume", "volume");
2930 
2931  if (m_pAudioVolume) {
2932  res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pAudioVolume );
2933  }
2934 
2935  m_pAudioPanorama = gst_element_factory_make ("audiopanorama", "balance");
2936 
2937  if (m_pAudioPanorama) {
2938  res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pAudioPanorama );
2939  }
2940 
2941  m_pAudioSink = gst_element_factory_make ("autoaudiosink", "audioout");
2942 
2943  if (m_pAudioSink) {
2944  res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pAudioSink );
2945  }
2946  }
2947  //BuildLock.Unlock();
2948 
2949 }
2950 
2951 
2952 bool moGsGraph::BuildLiveVideoGraph( moText filename , moBucketsPool *pBucketsPool ) {
2953 
2954  m_pBucketsPool = pBucketsPool;
2955  bool link_result = false;
2956 // gchar* checkval;
2957  bool res = false;
2958 
2959  moFile VideoFile( filename );
2960 
2961  if ( !VideoFile.Exists() ) return false;
2962 
2963  //if (filename.Length()>0)
2964  {
2965 
2966  m_pFileSource = gst_element_factory_make ("filesrc", "source");
2967 
2968  if (m_pFileSource) {
2969  g_object_set (G_OBJECT (m_pFileSource), "location", (char*)filename/*("///home/fabri/jp5.avi")*/, NULL);
2970  //g_object_get (G_OBJECT (m_pFileSource), "location", &checkval, NULL);
2971  //GstElement *filter = gst_element_factory_make ("capsfilter", "filter");
2972  //g_object_set (G_OBJECT (m_pFileSource), "pattern", GST_VIDEO_TEST_SRC_SNOW, NULL);
2973  //res = gst_pad_set_caps( gst_element_get_pad( m_pFileSource, "src" ), NULL);
2974 
2975 
2976  res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pFileSource );
2977 
2978 
2979  m_pColorSpaceInterlace = gst_element_factory_make (VIDEOCONVERT, "color0");
2980  if (m_pColorSpaceInterlace) {
2981  res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pColorSpaceInterlace );
2982  }
2983 
2984  m_pVideoBalance = gst_element_factory_make ("videobalance", "videobalance");
2985  if (m_pVideoBalance) {
2986  res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pVideoBalance );
2987  }
2988 
2989  m_pColorSpace = gst_element_factory_make (VIDEOCONVERT, "color");
2990  if (m_pColorSpace) {
2991  res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pColorSpace );
2992  }
2993 /*
2994  m_pCapsFilter = gst_element_factory_make ("capsfilter", "filt");
2995  if (m_pCapsFilter) {
2996 #ifndef GSTVERSION
2997 
2998  g_object_set (G_OBJECT ((GstElement*)m_pCapsFilter), "caps", gst_caps_new_simple ("video/x-raw-rgb",
2999  "bpp", G_TYPE_INT, 24,
3000  "depth", G_TYPE_INT, 24,
3001  "red_mask",G_TYPE_INT, 255,
3002  "green_mask",G_TYPE_INT, 65280,
3003  "blue_mask",G_TYPE_INT, 16711680,
3004  "endianness", G_TYPE_INT, 4321,
3005  NULL), NULL);
3006  //depth=(int)24, red_mask=(int)16711680, green_mask=(int)65280, blue_mask=(int)255, endianness=(int)4321
3007  res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pCapsFilter );
3008 
3009 #else
3010  g_object_set (G_OBJECT (m_pCapsFilter), "caps", gst_caps_new_simple ( "video/x-raw",
3011  "format", G_TYPE_STRING, "RGB",
3012  NULL), NULL);
 3013  res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pCapsFilter );
3014 #endif
3015 
3016  }*/
3017  //RetreivePads( m_pFileSource );
3018 
3021 
3023 
3024  m_pDecoderBin = gst_element_factory_make ( DECODEBIN, "decoder");
3025  if (m_pDecoderBin) {
3026  res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pDecoderBin );
3027 
3028  m_pFakeSink = gst_element_factory_make ("fakesink", "destout");
3029 #ifndef GSTVERSION
3030  //signal_newpad_id = g_signal_connect (m_pDecoderBin, "pad-added", G_CALLBACK (cb_pad_added), (gpointer)this);
3031  signal_newpad_id = g_signal_connect (m_pDecoderBin, "new-decoded-pad", G_CALLBACK (cb_newpad), (gpointer)this);
3032 #else
3033  signal_newpad_id = g_signal_connect (m_pDecoderBin, "pad-added", G_CALLBACK (cb_pad_added_new), (gpointer)this);
3034  MODebug2->Message( moText("moGsGraph:: added signal to Decoder Bin, \"pad-added\": ") + IntToStr(signal_newpad_id) ) ;
3035 #endif
3036 
3037 #ifndef GSTVERSION
3038  m_pFakeSink = gst_element_factory_make ("fakesink", "destout");
3039 #else
3040  cout << "creating FakeSink from appsink" << endl;
3041  m_pFakeSink = gst_element_factory_make ("appsink", "destout");
3042 #endif
3043  //RetreivePads( m_pFakeSink );
3044  if (m_pFakeSink) {
3045  MODebug2->Message(moText("moGsGraph:: created FakeSink! ") ) ;
3046 #ifdef GSTVERSION
3047  g_object_set (G_OBJECT (m_pFakeSink), "caps", gst_caps_new_simple ( "video/x-raw",
3048  "format", G_TYPE_STRING, "RGB",
3049  NULL), NULL);
3050  g_object_set (G_OBJECT (m_pFakeSink), "sync", (bool)true, NULL);
3051  g_object_set (G_OBJECT (m_pFakeSink), "drop", true, NULL);
3052  //gst_app_sink_set_emit_signals( (GstAppSink*)m_pFakeSink, true);
3053  gst_app_sink_set_max_buffers( (GstAppSink*)m_pFakeSink, 100 );
3054 #else
3055  g_object_set (G_OBJECT (m_pFakeSink), "sync", (bool)true, NULL);
3057 
3058 #endif
3059  res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pFakeSink );
3060 
3061  link_result = gst_element_link_many( (GstElement*)m_pFileSource, (GstElement*)m_pDecoderBin, NULL );
3062  if (link_result) {
3063 #ifndef GSTVERSION
3064  if (m_pVideoBalance)
3065  link_result = gst_element_link_many( (GstElement*)m_pColorSpaceInterlace, (GstElement*)m_pVideoBalance, (GstElement*)m_pColorSpace, (GstElement*)m_pCapsFilter, (GstElement*)m_pFakeSink, NULL );
3066  else
3067  link_result = gst_element_link_many( (GstElement*)m_pColorSpaceInterlace, (GstElement*)m_pColorSpace, (GstElement*)m_pCapsFilter, (GstElement*)m_pFakeSink, NULL );
3068 #else
3069  if (m_pVideoBalance)
3070  link_result = gst_element_link_many( (GstElement*)m_pColorSpaceInterlace, (GstElement*)m_pVideoBalance, (GstElement*)m_pColorSpace, (GstElement*)m_pFakeSink, NULL );
3071  else
3072  link_result = gst_element_link_many( (GstElement*)m_pColorSpaceInterlace, (GstElement*)m_pColorSpace, (GstElement*)m_pFakeSink, NULL );
3073 #endif
3074 
3076  if (m_pAudioConverter)
3077  bool link_audio_result = gst_element_link_many( (GstElement*)m_pAudioConverter, (GstElement*)m_pAudioVolume, (GstElement*)m_pAudioPanorama, (GstElement*)m_pAudioSink, NULL );
3078 
3079 
3080  if (link_result) {
3081 
3082  CheckState( gst_element_set_state ((GstElement*)m_pGstPipeline, GST_STATE_PAUSED), true /*SYNCRUNASLI*/ );
3083  MODebug2->Message( moText("moGsGraph::BuildLiveVideoGraph > GST_STATE_PAUSED > OK"));
3084 #ifdef GSTVERSION
3085  GstSample *sample;
3086  MODebug2->Message( moText("moGsGraph::BuildLiveVideoGraph > gst_app_sink_pull_preroll for appsink"));
3087  //g_signal_emit_by_name ( m_pFakeSink, "pull-sample", &sample, NULL);
3088 
3089  sample = gst_app_sink_pull_preroll( (GstAppSink*) m_pFakeSink );
3090  if (sample) {
3091  GstBuffer *Gbuffer;
3092  GstCaps *bcaps;
3093  GstStructure *bstr;
3094 
3099  bcaps = gst_sample_get_caps( sample );
3100  if (bcaps) {
3101  Gbuffer = gst_sample_get_buffer (sample);
3102  SetVideoFormat( bcaps, Gbuffer );
3103  gst_app_sink_set_emit_signals((GstAppSink*)m_pFakeSink, true);
3104  gst_app_sink_set_drop((GstAppSink*)m_pFakeSink, true);
3105  //g_object_set (G_OBJECT (m_pFakeSink), "sync", false, NULL);
3106  gst_app_sink_set_max_buffers((GstAppSink*)m_pFakeSink, 10000 );
3107  g_signal_connect( (GstElement*)m_pFakeSink, "new-sample", G_CALLBACK (appsink_new_sample), (gpointer)this );
3108  //gst_app_sink_set_callbacks( (GstAppSink*)m_pFakeSink, )
3109 
3110  }
3111  } else {
3112  MODebug2->Error( moText("moGsGraph::BuildLiveVideoGraph > no sample!"));
3113  cout << "gst_app_sink_is_eos: " << gst_app_sink_is_eos((GstAppSink*)m_pFakeSink) << endl;
3114  cout << "gst_app_sink_get_emit_signals: " << gst_app_sink_get_emit_signals((GstAppSink*)m_pFakeSink) << endl;
3115  cout << "gst_app_sink_get_max_buffers: " << gst_app_sink_get_max_buffers((GstAppSink*)m_pFakeSink) << endl;
3116  }
3117 #else
3118  WaitForFormatDefinition( 3000 );
3119 #endif
3120 
 3121  MODebug2->Message( moText("moGsGraph::BuildLiveVideoGraph > graph built"));
3122 
3123  //event_loop( (GstElement*)m_pGstPipeline, false, GST_STATE_PAUSED);
3124 
3125  return true;
3126 
3127  } else {
3128  MODebug2->Error( moText("moGsGraph::BuildLiveVideoGraph > m_pColorSpace m_pCapsFilter m_pFakeSink linking failed"));
3129  event_loop( (GstElement*)m_pGstPipeline, false, GST_STATE_PAUSED);
3130  }
3131  } else {
3132  MODebug2->Error( moText("moGsGraph::BuildLiveVideoGraph > filesrc and decodebin linkage failed: ") + filename );
3133  event_loop( (GstElement*)m_pGstPipeline, false, GST_STATE_PAUSED);
3134  }
3135 
3136  } else {
3137  MODebug2->Error( moText("moGsGraph::BuildLiveVideoGraph > fakesink construction failed"));
3138  event_loop( (GstElement*)m_pGstPipeline, false, GST_STATE_PAUSED);
3139  }
3140  } else {
3141  MODebug2->Error( moText("moGsGraph::BuildLiveVideoGraph > decodebin construction failed"));
3142  event_loop( (GstElement*)m_pGstPipeline, false, GST_STATE_PAUSED);
3143  }
3144  } else {
3145  MODebug2->Error( moText("moGsGraph::BuildLiveVideoGraph > file source failed: ") + filename);
3146  event_loop( (GstElement*)m_pGstPipeline, false, GST_STATE_PAUSED);
3147  }
3148  return false;
3149 
3150  /*
3151  GstPad *pad;
3152  pad = gst_element_get_pad (m_pDecoderBin, "src0");
3153  gst_pad_add_buffer_probe (pad, G_CALLBACK (cb_have_data), NULL);
3154  gst_object_unref (pad);
3155  */
3156 
3157 
3158  }
3159 
3160  /*SETTING SOURCE*/
3161  /*
3162  // set the source audio file
3163  g_object_set (player, "location", "helloworld.ogg", NULL);
3164  */
3165 
3166  /*more complex*/
3167  /*
3168  // create elements
3169  pipeline = gst_pipeline_new ("my_pipeline");
3170  source = gst_element_factory_make ("filesrc", "source");
3171  g_object_set (source, "location", argv[1], NULL);
3172  demux = gst_element_factory_make ("oggdemux", "demuxer");
3173 
3174  // you would normally check that the elements were created properly
3175 
3176  // put together a pipeline
3177  gst_bin_add_many (GST_BIN (pipeline), source, demux, NULL);
3178  gst_element_link_pads (source, "src", demux, "sink");
3179 
3180  // listen for newly created pads
3181  g_signal_connect (demux, "pad-added", G_CALLBACK (cb_new_pad), NULL);
3182 */
3183 
3184 /*
3185  // create elements
3186  source = gst_element_factory_make ("fakesrc", "source");
3187  filter = gst_element_factory_make ("identity", "filter");
3188  sink = gst_element_factory_make ("fakesink", "sink");
3189 
3190  // must add elements to pipeline before linking them
3191  gst_bin_add_many (GST_BIN (pipeline), source, filter, sink, NULL);
3192 
3193  // link
3194  if (!gst_element_link_many (source, filter, sink, NULL)) {
3195  g_warning ("Failed to link elements!");
3196  }
3197 
3198 */
3199 
3200 
3201 /*putting pipelines into pipelines!!!! */
3202 /*
3203  //
3204  pipeline = gst_pipeline_new ("my_pipeline");
3205  bin = gst_pipeline_new ("my_bin");
3206  source = gst_element_factory_make ("fakesrc", "source");
3207  sink = gst_element_factory_make ("fakesink", "sink");
3208 
3209  // set up pipeline
3210  gst_bin_add_many (GST_BIN (bin), source, sink, NULL);
3211  gst_bin_add (GST_BIN (pipeline), bin);
3212  gst_element_link (source, sink);
3213  */
3214 
3215 
3216 
3217  /*getting specific properties of caps*/
3218 /*
3219  static void
3220 read_video_props (GstCaps *caps)
3221 {
3222  gint width, height;
3223  const GstStructure *str;
3224 
3225  g_return_if_fail (gst_caps_is_fixed (caps));
3226 
3227  str = gst_caps_get_structure (caps, 0);
3228  if (!gst_structure_get_int (str, "width", &width) ||
3229  !gst_structure_get_int (str, "height", &height)) {
3230  g_print ("No width/height available\n");
3231  return;
3232  }
3233 
3234  g_print ("The video size of this set of capabilities is %dx%d\n",
3235  width, height);
3236 }
3237 */
3238 
3239 /*CREATE GHOST PAD FOR A BIN ATTACHED TO THE FIRST IN-PAD*/
3240 /*
3241  GstElement *bin, *sink;
3242  GstPad *pad;
3243 
3244  // init
3245  gst_init (&argc, &argv);
3246 
3247  // create element, add to bin
3248  sink = gst_element_factory_make ("fakesink", "sink");
3249  bin = gst_bin_new ("mybin");
3250  gst_bin_add (GST_BIN (bin), sink);
3251 
3252  //add ghostpad
3253  pad = gst_element_get_pad (sink, "sink");
3254  gst_element_add_pad (bin, gst_ghost_pad_new ("sink", pad));
3255  gst_object_unref (GST_OBJECT (pad));
3256 
3257 */
3258  return false;
3259 
3260 }
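
// Illustrative sketch (hypothetical helper, not used by moGsGraph): once the
// graph above is prerolled, frames are consumed from the appsink. In
// GStreamer 1.0 each frame arrives as a GstSample whose buffer must be mapped
// before the RGB bytes can be copied into a bucket. Stripped of the bucket
// pool, the pull side looks roughly like this:
#ifdef GSTVERSION
static gboolean sketch_pull_rgb_frame( GstAppSink *appsink )
{
  GstSample *sample = gst_app_sink_pull_sample (appsink);  // blocks
  if (!sample) return FALSE;                               // EOS or error

  GstBuffer *buffer = gst_sample_get_buffer (sample);
  GstMapInfo map;
  if (gst_buffer_map (buffer, &map, GST_MAP_READ)) {
    // map.data points at packed RGB pixels, map.size bytes long
    g_print ("got frame: %u bytes\n", (guint) map.size);
    gst_buffer_unmap (buffer, &map);
  }
  gst_sample_unref (sample);
  return TRUE;
}
#endif // GSTVERSION
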
3261 
3262 
3263 /*
 3264 typical GStreamer caps fields:
3265 format(fourcc);
3266 bpp=(int)32,
3267 depth=(int)24,
3268 endianness=(int)4321,
3269 red_mask=(int)-16777216,
3270 green_mask=(int)16711680,
3271 blue_mask=(int)65280,
3272 width=(int)[ 2, 2147483647 ],
3273 height=(int)[ 2, 2147483647 ],
3274 framerate=(fraction)[ 0/1, 2147483647/1 ];
3275 */
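
// Illustrative sketch (hypothetical helper, not used by moGsGraph): the fields
// listed above are the 0.10-style caps. The same information can be assembled
// programmatically; with the 1.0 naming a fixed RGB video caps is simply:
static GstCaps* sketch_make_rgb_caps( int width, int height, int fps )
{
  return gst_caps_new_simple ("video/x-raw",
                              "format",    G_TYPE_STRING, "RGB",
                              "width",     G_TYPE_INT, width,
                              "height",    G_TYPE_INT, height,
                              "framerate", GST_TYPE_FRACTION, fps, 1,
                              NULL);
}
// e.g. gchar *s = gst_caps_to_string (sketch_make_rgb_caps (640, 480, 30));
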
3276 
3277 void
 3278 moGsGraph::SetAudioFormat( moGstCaps* caps, moGstBuffer* buffer ) {
 3279 
3280  bool isfixed = false;
3281  GstBuffer* Gbuffer = (GstBuffer*)buffer;
3282 
3283  isfixed = gst_caps_is_fixed((GstCaps*)caps);
3284 
3285 
3286  if (!isfixed) {
3287 
3288  return;
3289  }
3290 
3291  GstStructure* str;
3292  str = gst_caps_get_structure ((GstCaps*)caps, 0);
3293 
3294  const gchar *sstr;
3295 
3296  sstr = gst_structure_to_string (str);
3297 
3298  //cout << "SetVideoFormat: we have a format!!" << sstr << endl;
3299 
3300  if (g_strrstr( sstr, "channels" )) {
3301 
3302  //to calculate framerate
3303  gint width, depth;
3304  //gint value_numerator, value_denominator;
3305  gint channels, rate;
3306 
3307  gst_structure_get_int( str, "width", &width);
3308  gst_structure_get_int( str, "depth", &depth);
3309  gst_structure_get_int( str, "channels", &channels);
3310  gst_structure_get_int( str, "rate", &rate);
3311  //gst_structure_get_int( str, "height", &height);
3312  //gst_structure_get_fraction( str, "framerate", &value_numerator, &value_denominator );
3313 
3314  m_AudioFormat.m_Width = (MOuint)width;
3315  m_AudioFormat.m_Depth = (MOuint)depth;
3316  m_AudioFormat.m_Channels = (MOuint)channels;
3319 /*
3320  m_AudioFormat.m_Width = (MOuint)width;
3321  m_AudioFormat.m_Height = (MOuint)height;
3322  m_AudioFormat.m_FrameRate = (value_numerator * 100) / value_denominator;
3323  //cout << "Width:" << m_AudioFormat.m_Width << endl;
3324  //cout << "Height:" << m_AudioFormat.m_Height << endl;
3325  //cout << "Framerate:" << m_AudioFormat.m_FrameRate << endl;
3326 
3327  //m_AudioFormat.m_BitCount = pVih->bmiHeader.biBitCount;
3328  //m_AudioFormat.m_BitRate = pVih->dwBitRate;
3329  */
3330  if (Gbuffer!=NULL) {
3331  m_AudioFormat.m_TimePerSample = Gbuffer->duration;
3332 #ifndef GSTVERSION
3333  m_AudioFormat.m_BufferSize = Gbuffer->size;
3334 #else
3335  m_AudioFormat.m_BufferSize = gst_buffer_get_size( Gbuffer );
3336 #endif
3337  }
3338  //m_AudioFormat.SetVideoMode();
3340 
3341  }
3342 
 3343  MODebug2->Message(
 3344  "SetAudioFormat: we have a format!! "
 3345  + IntToStr(m_AudioFormat.m_Channels)
 3346  + " Channels, "
 3347  + IntToStr(m_AudioFormat.m_SampleRate)
 3348  + " Hz, "
 3349  + IntToStr(m_AudioFormat.m_Width)
 3350  + " bits, "
 3351  + IntToStr(m_AudioFormat.m_BufferSize)
 3352  + " bytes per buffer, "
 3353  + IntToStr(m_AudioFormat.m_TimePerSample)
 3354  + " nanoseconds per sample "
 3355 
 3356  );
3357 
3358 
3359 }
3360 
3361 void
 3362 moGsGraph::SetVideoFormat( moGstCaps* caps, moGstBuffer* buffer ) {
 3363 
3364  bool isfixed = false;
3365  GstBuffer* Gbuffer = (GstBuffer*)buffer;
3366 
3367  isfixed = gst_caps_is_fixed((GstCaps*)caps);
3368 
3369 
3370  if (!isfixed) {
3371 
3372  return;
3373  }
3374 
3375  GstStructure* str;
3376  str = gst_caps_get_structure ((GstCaps*)caps, 0);
3377 
3378  const gchar *sstr;
3379 
3380  sstr = gst_structure_to_string (str);
3381 
3382  //cout << "SetVideoFormat: we have a format!!" << sstr << endl;
3383 
3384  if (g_strrstr( sstr, "width" )) {
3385 
3386  //to calculate framerate
3387  gint width, height, value_numerator, value_denominator, redmask, greenmask, bluemask, bitcount;
3388 
3389  gst_structure_get_int( str, "width", &width);
3390  gst_structure_get_int( str, "height", &height);
3391  gst_structure_get_fraction( str, "framerate", &value_numerator, &value_denominator );
3392  gst_structure_get_int( str, "red_mask", &redmask );
3393  gst_structure_get_int( str, "green_mask", &greenmask );
3394  gst_structure_get_int( str, "blue_mask", &bluemask );
3395  gst_structure_get_int( str, "bpp", &bitcount );
3396 
3397  m_VideoFormat.m_Width = (MOuint)width;
3398  m_VideoFormat.m_Height = (MOuint)height;
3399  m_VideoFormat.m_FrameRate = (value_numerator * 100) / value_denominator;
3400  m_VideoFormat.m_RedMask = (MOuint) redmask;
3401  m_VideoFormat.m_GreenMask = (MOuint) greenmask;
3402  m_VideoFormat.m_BlueMask = (MOuint) bluemask;
3403  m_VideoFormat.m_BitCount = (MOuint) bitcount;
3404 
3405  //cout << "Width:" << m_VideoFormat.m_Width << endl;
3406  //cout << "Height:" << m_VideoFormat.m_Height << endl;
3407  //cout << "Framerate:" << m_VideoFormat.m_FrameRate << endl;
3408 
3409  //m_VideoFormat.m_BitCount = pVih->bmiHeader.biBitCount;
3410  //m_VideoFormat.m_BitRate = pVih->dwBitRate;
3411  if (buffer!=NULL) {
3412  m_VideoFormat.m_TimePerFrame = Gbuffer->duration;
3413 #ifndef GSTVERSION
3414  m_VideoFormat.m_BufferSize = Gbuffer->size;
3415 #else
3416  m_VideoFormat.m_BufferSize = gst_buffer_get_size( Gbuffer );
3417 #endif
3418  }
3421 
3422  }
3423 
 3424  MODebug2->Message(
 3425  "SetVideoFormat: we have a format!!"
 3426  + IntToStr(m_VideoFormat.m_Width)
 3427  + " X "
 3428  + IntToStr(m_VideoFormat.m_Height)
 3429  + " m_BitCount: "
 3430  + IntToStr(m_VideoFormat.m_BitCount)
 3431  + " m_BufferSize: "
 3432  + IntToStr(m_VideoFormat.m_BufferSize)
 3433  + " buffer duration: "
 3434  + IntToStr(m_VideoFormat.m_TimePerFrame)
 3435  + " m_FrameRate: "
 3436  + IntToStr(m_VideoFormat.m_FrameRate)
 3437  + " m_RedMask: "
 3438  + IntToStr(m_VideoFormat.m_RedMask)
 3439  + " m_GreenMask: "
 3440  + IntToStr(m_VideoFormat.m_GreenMask)
 3441  + " m_BlueMask: "
 3442  + IntToStr(m_VideoFormat.m_BlueMask)
 3443 
 3444  );
3445 
3446 
3447 }
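
// Illustrative sketch (hypothetical helper, not used by moGsGraph):
// SetVideoFormat reads 0.10-style fields (bpp, red_mask, ...). Under
// GStreamer 1.0 those fields are gone and the pixel layout is a single
// "format" string, so the equivalent read becomes:
static void sketch_read_video_caps_1_0( GstCaps *caps )
{
  if (!gst_caps_is_fixed (caps)) return;

  GstStructure *str = gst_caps_get_structure (caps, 0);
  gint width = 0, height = 0, fps_n = 0, fps_d = 1;
  const gchar *format = gst_structure_get_string (str, "format"); // e.g. "RGB"

  gst_structure_get_int (str, "width",  &width);
  gst_structure_get_int (str, "height", &height);
  gst_structure_get_fraction (str, "framerate", &fps_n, &fps_d);

  g_print ("%s %dx%d @ %d/%d fps\n",
           format ? format : "?", width, height, fps_n, fps_d);
}
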
3448 
3449 
3450 
3451 
3452 
3453 
3454 /*
3455  *
3456 
3457  GST_STATE_NULL: this is the default state. This state will deallocate all resources held by the element.
3458  *
3459 
3460  GST_STATE_READY: in the ready state, an element has allocated all of its global resources, that is, resources that can be kept
3461  within streams. You can think about opening devices, allocating buffers and so on. However,
3462  the stream is not opened in this state, so the stream positions is automatically zero.
3463  If a stream was previously opened, it should be closed in this state, and position, properties and such should be reset.
3464  *
3465 
3466  GST_STATE_PAUSED: in this state, an element has opened the stream, but is not actively processing it. An element is allowed to
3467  modify a stream's position, read and process data and such to prepare for playback as soon as state is changed to PLAYING,
3468  but it is not allowed to play the data which would make the clock run. In summary, PAUSED is the same as PLAYING but without
3469  a running clock.
3470 
3471  Elements going into the PAUSED state should prepare themselves for moving over to the PLAYING state as soon as possible.
3472  Video or audio outputs would, for example, wait for data to arrive and queue it so they can play it right after the state change.
3473  Also, video sinks can already play the first frame (since this does not affect the clock yet). Autopluggers could use this same
 3474  state transition to already plug together a pipeline. Most other elements, such as codecs or filters, do not need to explicitly
3475  do anything in this state, however.
3476  *
3477 
3478  GST_STATE_PLAYING: in the PLAYING state, an element does exactly the same as in the PAUSED state, except that the clock now runs.
3479 
3480 */
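
// Illustrative sketch (hypothetical helper, not used by moGsGraph): the
// NULL / READY / PAUSED / PLAYING ladder described above is driven with
// gst_element_set_state(); since the change may complete asynchronously,
// callers that need the final state wait on gst_element_get_state():
static gboolean sketch_set_state_blocking( GstElement *pipeline, GstState target )
{
  GstStateChangeReturn ret = gst_element_set_state (pipeline, target);
  if (ret == GST_STATE_CHANGE_FAILURE) return FALSE;

  GstState current, pending;
  ret = gst_element_get_state (pipeline, &current, &pending, 5 * GST_SECOND);

  // live sources report GST_STATE_CHANGE_NO_PREROLL instead of SUCCESS
  return (ret != GST_STATE_CHANGE_FAILURE && current == target);
}
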
3481 
3482 
3483 
3484 /*
3485 typedef enum {
3486  GST_STATE_CHANGE_FAILURE = 0,
3487  GST_STATE_CHANGE_SUCCESS = 1,
3488  GST_STATE_CHANGE_ASYNC = 2,
3489  GST_STATE_CHANGE_NO_PREROLL = 3
3490 } GstStateChangeReturn;
3491 */
3492 
3493 bool
3494 moGsGraph::CheckState( moGstStateChangeReturn state_change_result, bool waitforsync) {
3495 
3496 
3497  GstStateChangeReturn Gstate_change_result = (GstStateChangeReturn)state_change_result;
3498 
3499  if (!waitforsync)
3500  switch(Gstate_change_result) {
3501  case GST_STATE_CHANGE_FAILURE:
3502  //MODebug2->Push(moText("GST_STATE_CHANGE_FAILURE"));
3503  return false;
3504  break;
3505  case GST_STATE_CHANGE_SUCCESS:
3506  //MODebug2->Push(moText("GST_STATE_CHANGE_SUCCESS"));
3507  return true;
3508  break;
3509  case GST_STATE_CHANGE_ASYNC:
3510  //MODebug2->Push(moText("GST_STATE_CHANGE_ASYNC"));
3511  return true;
3512  break;
3513  case GST_STATE_CHANGE_NO_PREROLL:
3514  //MODebug2->Push(moText("GST_STATE_CHANGE_NO_PREROLL"));
3515  return false;
3516  break;
3517  }
3518 
3519  GstStateChangeReturn state_wait;
3520  GstState current_state, pending_state;
3521  GstClockTime time_out = GST_CLOCK_TIME_NONE;
3522  time_out = GST_SECOND;
3523 
3524  while(waitforsync) {
3525  state_wait = gst_element_get_state(GST_ELEMENT (m_pGstPipeline),&current_state, &pending_state, time_out);
3526  switch(state_wait) {
3527  case GST_STATE_CHANGE_SUCCESS:
3528  waitforsync = false;
3529  return true;
3530  break;
3531  case GST_STATE_CHANGE_FAILURE:
3532  waitforsync = false;
3533  return false;
3534  break;
3535  default:
3536  waitforsync = false;
3537  break;
3538  /*
3539  case GST_STATE_CHANGE_ASYNC:
3540  waitforsync = true;
3541  break;
3542  case GST_STATE_CHANGE_NO_PREROLL:
3543  waitforsync = true;
3544  break;
3545  */
3546  }
3547  }
3548 
3549  return false;
3550 
3551 }
3552 
 3553 moStreamState moGsGraph::GetState() {
 3554 
3555  GstStateChangeReturn state_wait;
3556  GstState current_state, pending_state;
3557  GstClockTime time_out = GST_CLOCK_TIME_NONE;
3558  time_out = GST_SECOND;
3559 
3560  GstPad* srcRGB = NULL;
3561  bool padactive = false;
3562  bool padlinked = false;
3563  bool padblocked = false;
3564  bool padblocking = false;
3565 
3566 
3567  if (m_pColorSpace) {
3568 #ifndef GSTVERSION
3569  srcRGB = gst_element_get_pad ( (GstElement*)m_pColorSpace, "src");
3570 #else
3571  srcRGB = gst_element_get_static_pad ( (GstElement*)m_pColorSpace, "src" );
3572 #endif
3573 
3574  padactive = gst_pad_is_active( srcRGB );
3575  padlinked = gst_pad_is_linked( srcRGB );
3576  padblocked = gst_pad_is_blocked( srcRGB );
3577  padblocking = gst_pad_is_blocking( srcRGB );
3578  }
3579 
3580  if (m_pGMainContext) {
3581  if (g_main_context_iteration( (GMainContext*)m_pGMainContext, false )) {
3582  //MODebug2->Message( moText("moGsGraph ::GetState (events)") );
3583  } else {
3584  //MODebug2->Message( moText("moGsGraph ::GetState (no events!!)"));
3585  }
3586  }
3587 /*
3588  MODebug2->Message( moText(" Position:")
3589  + IntToStr( this->GetPosition())
3590  //+ moText(" pad active: ")
3591  // + IntToStr((int)padactive)
3592  // + moText(" pad linked: ")
3593  // + IntToStr((int)padlinked)
3594  // + moText(" pad blocked: ")
3595  // + IntToStr((int)padblocked)
3596  // + moText(" pad blocking: ")
3597  // + IntToStr((int)padblocking)
3598  );
3599 */
3600  //MODebug2->Message( moText("moGsGraph ::GetState > gst_element_get_state"));
3601  state_wait = gst_element_get_state(GST_ELEMENT (m_pGstPipeline),&current_state, &pending_state, time_out);
3602  /*g_main_context_iteration
3603  GST_STATE_VOID_PENDING = 0,
3604  GST_STATE_NULL = 1,
3605  GST_STATE_READY = 2,
3606  GST_STATE_PAUSED = 3,
3607  GST_STATE_PLAYING = 4
3608  */
3609 
3610  switch(current_state) {
3611  case GST_STATE_VOID_PENDING:
3612  //MODebug2->Message( moText("moGsGraph ::GetState GST_STATE_VOID_PENDING"));
3613  return MO_STREAMSTATE_UNKNOWN;
3614  break;
3615  case GST_STATE_NULL:
3616  //MODebug2->Message( moText("moGsGraph ::GetState GST_STATE_NULL"));
3617  return MO_STREAMSTATE_STOPPED;
3618  break;
3619  case GST_STATE_READY:
3620  //MODebug2->Message( moText("moGsGraph ::GetState GST_STATE_READY"));
3621  return MO_STREAMSTATE_READY;
3622  break;
3623  case GST_STATE_PAUSED:
3624  //MODebug2->Message( moText("moGsGraph ::GetState GST_STATE_PAUSED"));
3625  return MO_STREAMSTATE_PAUSED;
3626  break;
3627  case GST_STATE_PLAYING:
3628  //MODebug2->Message( moText("moGsGraph ::GetState GST_STATE_PLAYING"));
3629  return MO_STREAMSTATE_PLAYING;
3630  break;
3631  }
3632 
3633  //MODebug2->Message( moText("moGsGraph ::GetState MO_STREAMSTATE_UNKNOWN"));
3634 
3635  return MO_STREAMSTATE_UNKNOWN;
3636 
3637 }
3638 
3639 //CONTROL METHODS
3640 void
 3641 moGsGraph::Play() {
 3642  /* start the pipeline */
3643  //MODebug2->Message(moText("moGsGraph::Play()"));
3644  //MODebug2->Message(moText("moGsGraph::Play( SetEOS)"));
3645  SetEOS(false);
3646  //MODebug2->Message(moText("moGsGraph::Play() calling CheckState -> GST_STATE_PLAYING"));
3647  CheckState( gst_element_set_state (GST_ELEMENT (m_pGstPipeline), GST_STATE_PLAYING), true );
3648  //MODebug2->Message(moText("moGsGraph::Play() returnin CheckState."));
3649 }
3650 
3651 void
 3652 moGsGraph::Stop() {
 3653  /*set state to NULL*/
3654  SetEOS(false);
3655  CheckState( gst_element_set_state (GST_ELEMENT (m_pGstPipeline), GST_STATE_NULL) );
3656  //moGsGraph::Pause();
3657 }
3658 
3659 void
 3660 moGsGraph::Pause() {
 3661 /*set state to PAUSED*/
3664  CheckState( gst_element_set_state (GST_ELEMENT (m_pGstPipeline), GST_STATE_PAUSED));
3665  }
3666 }
3667 
3668 #define MO_INFINITE -1
3669 
3673 void
3674 moGsGraph::Seek( MOuint frame, float rate ) {
3675 
3676  gint64 time_nanoseconds;
3677  bool res;
3678  rate = 1.0;
3679  //MODebug2->Message(moText("moGsGraph :: Seeking:") + IntToStr(frame) );
3680 
3681  if (m_VideoFormat.m_TimePerFrame!=0 && m_FramesLength>0 && m_FramesLength<(MOulong)MO_INFINITE) {
3682 
3685  if ( (GetState()==MO_STREAMSTATE_PAUSED) && frame >= (m_FramesLength - 1) ) {
3686  frame = m_FramesLength - 1;
3687  }
3688 
3689  time_nanoseconds = (gint64) frame * m_VideoFormat.m_TimePerFrame;
3690  //MODebug2->Message(" Seeking frame: " + IntToStr(frame) + " time (ns): " + IntToStr(time_nanoseconds) + " timeperframe:" + IntToStr(m_VideoFormat.m_TimePerFrame) );
3691  //cout << "seeking frame:" << frame << " in " << time_nanoseconds << endl;
3692  /*res = gst_element_seek (m_pGstPipeline, 1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
3693  GST_SEEK_TYPE_SET, time_nanoseconds,
3694  GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);
3695  */
3696  res = gst_element_seek_simple(
3697  (GstElement*)m_pGstPipeline,
3698  GST_FORMAT_TIME,
3699  (GstSeekFlags)(
3700  GST_SEEK_FLAG_FLUSH
3701  | GST_SEEK_FLAG_KEY_UNIT
3702  //| GST_SEEK_FLAG_ACCURATE
3703  ),
3704  time_nanoseconds );
3705  //cout << "success:" << res << endl;
3706  //this->Pause();
3707  } else {
3709  time_nanoseconds = frame * GST_MSECOND;
3710  res = gst_element_seek_simple( (GstElement*)m_pGstPipeline, GST_FORMAT_TIME, (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT ), time_nanoseconds );
3711  if (res!=true) {
3712  MODebug2->Error("moGsGraph :: Seek (time) error");
3713  }
3717  }
3718 
3719 }
3720 
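A minimal sketch of the frame-accurate branch of Seek(), assuming `pipeline` stands in for m_pGstPipeline and using an illustrative 25 fps time-per-frame:

  guint64  frame = 250;                          /* illustrative frame index */
  gint64   time_per_frame_ns = GST_SECOND / 25;  /* illustrative: 25 fps     */
  gint64   target_ns = (gint64) frame * time_per_frame_ns;
  gboolean ok = gst_element_seek_simple (pipeline, GST_FORMAT_TIME,
      (GstSeekFlags) (GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT), target_ns);
  if (!ok)
    g_warning ("seek to frame %" G_GUINT64_FORMAT " failed", frame);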
3721 
3722 MOulong
3723 moGsGraph::GetFramesLength() {
3724 
3725  GstFormat fmt = GST_FORMAT_TIME;
3726 
3727  gint64 len,lenF;
3728 #ifndef GSTVERSION
3729  if (gst_element_query_duration ((GstElement*)m_pGstPipeline, &fmt, &len)) {
3730 #else
3731  if (gst_element_query_duration ((GstElement*)m_pGstPipeline, fmt, &len)) {
3732 #endif
3733  /*g_print ("Time: %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "\r",
3734  GST_TIME_ARGS (pos), GST_TIME_ARGS (len));*/
3735  //if (m_VideoFormat.m_TimePerFrame) m_VideoFormat.m_TimePerFrame = 25;
3737  lenF = ( len / ( m_VideoFormat.m_TimePerFrame ) );
3738  //cout << "gsgraph: len: ns: " << len << " frames:" << m_FramesLength << endl;
3739  MODebug2->Message( "Total length (milliseconds):" + IntToStr(len/GST_MSECOND) + " (frames): " + IntToStr(lenF));
3740  m_FramesLength = lenF;
3741  return m_FramesLength;
3742  }
3743 
3744  return 0;
3745 }
3746 
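A minimal sketch of the duration-to-frames computation in GetFramesLength(), using the GStreamer 1.x gst_element_query_duration() signature; `pipeline` and the 25 fps time-per-frame are illustrative assumptions:

  gint64 time_per_frame_ns = GST_SECOND / 25;  /* illustrative: 25 fps */
  gint64 duration_ns = 0;
  if (gst_element_query_duration (pipeline, GST_FORMAT_TIME, &duration_ns)) {
    gint64 frames = duration_ns / time_per_frame_ns;
    g_print ("duration: %" G_GINT64_FORMAT " ms, %" G_GINT64_FORMAT " frames\n",
             duration_ns / GST_MSECOND, frames);
  }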
3747 MOulong
3748 moGsGraph::GetSamplesLength() {
3749  GstFormat fmt = GST_FORMAT_TIME;
3750 
3751  gint64 len,lenF;
3752 #ifndef GSTVERSION
3753  if (gst_element_query_duration ((GstElement*)m_pGstPipeline, &fmt, &len)) {
3754 #else
3755  if (gst_element_query_duration ((GstElement*)m_pGstPipeline, fmt, &len)) {
3756 #endif
3757  /*g_print ("Time: %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "\r",
3758  GST_TIME_ARGS (pos), GST_TIME_ARGS (len));*/
3760  lenF = ( len / ( m_AudioFormat.m_TimePerSample ) );
3761  //cout << "gsgraph: len: ns: " << len << " frames:" << m_FramesLength << endl;
3762  MODebug2->Message( "Total length (milliseconds):" + IntToStr(len/GST_MSECOND) + " (samples): " + IntToStr(lenF));
3763  m_SamplesLength = lenF;
3764  return m_SamplesLength;
3765  }
3766 
3767  return 0;
3768 }
3769 
3770 MOulong
3771 moGsGraph::GetDuration() {
3772 
3773  GstFormat fmt = GST_FORMAT_TIME;
3774 
3775  gint64 dur;
3776 
3777 #ifndef GSTVERSION
3778  if (gst_element_query_duration ((GstElement*)m_pGstPipeline, &fmt, &dur)) {
3779 #else
3780  if (gst_element_query_duration ((GstElement*)m_pGstPipeline, fmt, &dur)) {
3781 #endif
3782  m_Duration = GST_TIME_AS_MSECONDS(dur); //in milliseconds 1ms = 1 million ns
3783  //cout << "gsgraph: dur: ns: " << dur << endl;
3784  return m_Duration;
3785  }
3786 
3787  return 0;
3788 }
3789 
3792 
3793 MOulong
3794 moGsGraph::GetPosition() {
3795 
3796  GstFormat fmt = GST_FORMAT_TIME;
3797  gint64 pos,frame;
3798 
3799 #ifndef GSTVERSION
3800  if (gst_element_query_position ((GstElement*)m_pGstPipeline, &fmt, &pos)) {
3801 #else
3802  if (gst_element_query_position ((GstElement*)m_pGstPipeline, fmt, &pos)) {
3803 #endif
3804  if (m_VideoFormat.m_TimePerFrame==0) {
3805  return (pos / 1000000);
3806  }
3807  frame = pos / (gint64) m_VideoFormat.m_TimePerFrame;
3808  return (MOulong)frame;
3809  }
3810  return 0;
3811 }
3812 
3813 MOulong
3814 moGsGraph::GetPositionMS() {
3815 
3816  GstFormat fmt = GST_FORMAT_TIME;
3817  gint64 pos;
3818 
3819 #ifndef GSTVERSION
3820  if (gst_element_query_position ((GstElement*)m_pGstPipeline, &fmt, &pos)) {
3821 #else
3822  if (gst_element_query_position ((GstElement*)m_pGstPipeline, fmt, &pos)) {
3823 #endif
3824  return (MOulong)GST_TIME_AS_MSECONDS(pos);
3825  }
3826  return 0;
3827 }
3828 
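A minimal sketch of the position query in milliseconds, as done in GetPositionMS() (GStreamer 1.x signature; `pipeline` is illustrative):

  gint64 pos_ns = 0;
  if (gst_element_query_position (pipeline, GST_FORMAT_TIME, &pos_ns))
    g_print ("position: %" G_GINT64_FORMAT " ms\n", (gint64) GST_TIME_AS_MSECONDS (pos_ns));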
3829 
3830 bool
3831 moGsGraph::IsRunning() {
3832  if (!m_pGstPipeline) return false;
3833  if (gst_element_get_state ((GstElement*)m_pGstPipeline, NULL, NULL, -1) == GST_STATE_CHANGE_FAILURE ) return false;
3834  return true;
3835 }
3836 
3837 void
3838 moGsGraph::SetVolume( float volume ) {
3839  if (m_pAudioVolume && m_bInitialized ) {
3840  g_object_set ( (GstElement*)m_pAudioVolume, "volume", volume, NULL);
3841  }
3842 }
3843 
3844 void
3845 moGsGraph::SetBalance( float balance ) {
3846  if (m_pAudioPanorama && m_bInitialized ) {
3847  g_object_set ( (GstElement*)m_pAudioPanorama, "panorama", balance, NULL);
3848  }
3849 }
3850 
3851 void
3852 moGsGraph::SetPitch( float pitch ) {
3853  if (m_pAudioSpeed && m_bInitialized ) {
3854  //Pause();
3855  g_object_set ( (GstElement*)m_pAudioSpeed, "speed", pitch, NULL);
3856  //Play();
3857  }
3858 }
3859 
3860 void
3861 moGsGraph::SetEchoDelay( float delay ) {
3862  unsigned long long delayl = delay;
3863  if (m_pAudioEcho && m_bInitialized ) {
3864  g_object_set ( (GstElement*)m_pAudioEcho, "delay", delayl, NULL);
3865  }
3866 }
3867 
3868 void
3869 moGsGraph::SetEchoIntensity( float intensity ) {
3870  if (m_pAudioEcho && m_bInitialized ) {
3871  g_object_set ( (GstElement*)m_pAudioEcho, "intensity", intensity, NULL);
3872  }
3873 }
3874 
3875 void
3876 moGsGraph::SetEchoFeedback( float feedback ) {
3877  if (m_pAudioEcho && m_bInitialized ) {
3878  g_object_set ( (GstElement*)m_pAudioEcho, "feedback", feedback, NULL);
3879  }
3880 }
3881 
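The audio setters above are thin wrappers over g_object_set(); a minimal sketch, assuming the underlying elements are the stock "volume", "audiopanorama" and "audioecho" plugins (variable names are illustrative):

  g_object_set (volume_element,   "volume",   0.5,  NULL);  /* 0.0 .. 10.0               */
  g_object_set (panorama_element, "panorama", -1.0, NULL);  /* -1.0 = left, 1.0 = right  */
  g_object_set (echo_element,
                "delay",     (guint64) (500 * GST_MSECOND), /* in nanoseconds            */
                "intensity", 0.6,
                "feedback",  0.4,
                NULL);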
3882 
3883 void moGsGraph::SetBrightness( float brightness ) {
3884  if (m_pVideoBalance && m_bInitialized ) {
3885  g_object_set ( (GstElement*)m_pVideoBalance, "brightness", brightness, NULL);
3886  }
3887 }
3888 
3889 
3890 
3891 void moGsGraph::SetContrast( float contrast ) {
3892  if (m_pVideoBalance && m_bInitialized ) {
3893  g_object_set ( (GstElement*)m_pVideoBalance, "contrast", contrast, NULL);
3894  }
3895 }
3896 
3897 
3898 
3899 void moGsGraph::SetHue( float hue ) {
3900  if (m_pVideoBalance && m_bInitialized ) {
3901  g_object_set ( (GstElement*)m_pVideoBalance, "hue", hue, NULL);
3902  }
3903 }
3904 
3905 
3906 
3907 void moGsGraph::SetSaturation( float saturation ) {
3908  if (m_pVideoBalance && m_bInitialized ) {
3909  g_object_set ( (GstElement*)m_pVideoBalance, "saturation", saturation, NULL);
3910  }
3911 }
3912 
3913 
3914 
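Likewise, the color controls above map onto the "videobalance" element's gdouble properties; a minimal sketch with an illustrative `videobalance_element`:

  g_object_set (videobalance_element,
                "brightness", 0.1,   /* -1.0 .. 1.0 */
                "contrast",   1.2,   /*  0.0 .. 2.0 */
                "hue",        0.0,   /* -1.0 .. 1.0 */
                "saturation", 1.0,   /*  0.0 .. 2.0 */
                NULL);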
3915 MObyte *
3916 moGsGraph::GetFrameBuffer( MOlong *size ) {
3918  size = NULL;
3919  return NULL;
3920 }
3921 
3922 
3923 /*GET SOURCES FROM FILTER*/
3924 /*
3925 GstElement* tee;
3926 GstPad * pad;
3927  gchar *name;
3928 
3929  pad = gst_element_get_request_pad (tee, "src%d");
3930  name = gst_pad_get_name (pad);
3931  g_print ("A new pad %s was created\n", name);
3932  g_free (name);
3933 */
3934 
3935 
3936 /*CONNECTING TWO PINS OR PADS WITH SOME CAPABILITIES SET*/
3937 /*
3938  gboolean link_ok;
3939  GstCaps *caps;
3940 
3941  caps = gst_caps_new_full (
3942  gst_structure_new ("video/x-raw-yuv",
3943  "width", G_TYPE_INT, 384,
3944  "height", G_TYPE_INT, 288,
3945  "framerate", GST_TYPE_FRACTION, 25, 1,
3946  NULL),
3947  gst_structure_new ("video/x-raw-rgb",
3948  "width", G_TYPE_INT, 384,
3949  "height", G_TYPE_INT, 288,
3950  "framerate", GST_TYPE_FRACTION, 25, 1,
3951  NULL),
3952  NULL);
3953 
3954  link_ok = gst_element_link_filtered (element1, element2, caps);
3955  gst_caps_unref (caps);
3956 
3957  if (!link_ok) {
3958  g_warning ("Failed to link element1 and element2!");
3959  }
3960 */
3961 
3962 
3963 
3964 /*
3965 #include <gst/gst.h>
3966 
3967 [.. my_bus_callback goes here ..]
3968 
3969 static gboolean
3970 idle_exit_loop (gpointer data)
3971 {
3972  g_main_loop_quit ((GMainLoop *) data);
3973 
3974 
3975  return FALSE;
3976 }
3977 
3978 static void
3979 cb_typefound (GstElement *typefind,
3980  guint probability,
3981  GstCaps *caps,
3982  gpointer data)
3983 {
3984  GMainLoop *loop = data;
3985  gchar *type;
3986 
3987  type = gst_caps_to_string (caps);
3988  g_print ("Media type %s found, probability %d%%\n", type, probability);
3989  g_free (type);
3990 
3991  g_idle_add (idle_exit_loop, loop);
3992 }
3993 
3994 gint
3995 main (gint argc,
3996  gchar *argv[])
3997 {
3998  GMainLoop *loop;
3999  GstElement *pipeline, *filesrc, *typefind;
4000  GstBus *bus;
4001 
4002  //
4003  gst_init (&argc, &argv);
4004  loop = g_main_loop_new (NULL, FALSE);
4005 
4006  //
4007  if (argc != 2) {
4008  g_print ("Usage: %s <filename>\n", argv[0]);
4009  return -1;
4010  }
4011 
4012 
4013  pipeline = gst_pipeline_new ("pipe");
4014 
4015  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
4016  gst_bus_add_watch (bus, my_bus_callback, NULL);
4017  gst_object_unref (bus);
4018 
4019 
4020  filesrc = gst_element_factory_make ("filesrc", "source");
4021  g_object_set (G_OBJECT (filesrc), "location", argv[1], NULL);
4022  typefind = gst_element_factory_make ("typefind", "typefinder");
4023  g_signal_connect (typefind, "have-type", G_CALLBACK (cb_typefound), loop);
4024 
4025 
4026  gst_bin_add_many (GST_BIN (pipeline), filesrc, typefind, NULL);
4027  gst_element_link (filesrc, typefind);
4028  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
4029  g_main_loop_run (loop);
4030 
4031 
4032  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
4033  gst_object_unref (GST_OBJECT (pipeline));
4034 
4035  return 0;
4036 }
4037 
4038 */
4039 #endif
4040 
moCaptureDevices m_PreferredDevices
Available video devices.
Definition: moVideoGraph.h:572
moAudioFormat m_AudioFormat
Audio format.
Definition: moVideoGraph.h:762
virtual ~moGsGraph()
Destructor.
Definition: moGsGraph.cpp:1499
MOuint m_SampleRate
Definition: moVideoGraph.h:275
void SetEchoIntensity(float intensity)
Definition: moGsGraph.cpp:3869
MOuint m_TimePerSample
Definition: moVideoGraph.h:276
MOuint m_GreenMask
Definition: moVideoGraph.h:212
bool moGBoolean
Definition: moGsGraph.h:70
#define DECODEBIN
Definition: moGsGraph.cpp:60
#define MOulong
Definition: moTypes.h:392
bool BuildLiveVideoGraph(moText filename, moBucketsPool *pBucketsPool)
Live-mode video playback graph, played back asynchronously according to the clock...
Definition: moGsGraph.cpp:2952
void SetAudioFormat(moGstCaps *caps, moGstBuffer *buffer=NULL)
Definition: moGsGraph.cpp:3278
moVideoFormat GetVideoFormat()
Returns the video format.
virtual void SetEOS(bool iseos)
Definition: moGsGraph.cpp:1772
void Error(moText p_text)
Reports and logs an error.
Definition: moAbstract.cpp:79
static moGBoolean cb_have_data(moGstPad *pad, moGstBuffer *buffer, moGPointer u_data)
Definition: moGsGraph.cpp:275
int GetSourceHeight() const
Returns the height of the source image.
Definition: moVideoGraph.h:438
int GetSourceBpp() const
Returns the bits per pixel of the source image.
Definition: moVideoGraph.h:443
bool DestroyRetreivedBucket()
Definition: moBuckets.cpp:244
#define VIDEOCONVERT
Definition: moGsGraph.cpp:56
void ToLower()
Definition: moText.cpp:632
void Stop()
Stops video playback.
Definition: moGsGraph.cpp:3652
MOuint m_BufferSize
Definition: moVideoGraph.h:208
Definition: moLock.h:50
void moGstAppSink
Definition: moGsGraph.h:76
void SetPitch(float pitch)
Definition: moGsGraph.cpp:3852
MOuint m_RedMask
Definition: moVideoGraph.h:211
int GetSourceFlipH() const
Returns the horizontal image flip value.
Definition: moVideoGraph.h:448
moColorMode m_ColorMode
Definition: moVideoGraph.h:202
moCaptureDevices m_CaptureDevices
Definition: moVideoGraph.h:571
void * moGPointer
Definition: moGsGraph.h:68
void SetContrast(float contrast)
Definition: moGsGraph.cpp:3891
bool Unlock()
Releases access to the internal buffer.
Definition: moBuckets.cpp:49
MOuint m_TimePerFrame
Definition: moVideoGraph.h:206
virtual bool CheckCaptureDevice(int i)
Checks whether the given video device is still available.
Definition: moGsGraph.cpp:1414
long signal_handoff_id
Definition: moGsGraph.h:285
void SetVideoMode()
Definition: moVideoGraph.h:173
MOuint m_BufferSize
Definition: moVideoGraph.h:280
void Seek(MOuint frame, float rate=1.0)
Definition: moGsGraph.cpp:3674
bool Lock()
Blocks access to the internal buffer.
Definition: moBuckets.cpp:45
#define MObyte
Definition: moTypes.h:400
int moGstStateChangeReturn
Definition: moGsGraph.h:71
bool CheckState(moGstStateChangeReturn state_change_result, bool waitforsync=false)
Definition: moGsGraph.cpp:3494
MOulong moGetTicksAbsolute(bool force_real_absolute)
Returns the value of the Moldeo clock in milliseconds.
Definition: moTimer.cpp:15
void RetreivePads(moGstElement *FilterElement)
Definition: moGsGraph.cpp:2636
int moGstFlowReturn
Definition: moGsGraph.h:77
MOulong GetSamplesLength()
Definition: moGsGraph.cpp:3748
bool BuildLiveGraph(moBucketsPool *pBucketsPool, moCaptureDevice p_capdev)
Definition: moGsGraph.cpp:1821
bool BuildLiveStreamingGraph(moBucketsPool *pBucketsPool, moText p_location)
Definition: moGsGraph.cpp:1841
int GetSourceFlipV() const
Returns the vertical image flip value.
Definition: moVideoGraph.h:453
bool BuildLiveSound(moText filename)
Definition: moGsGraph.cpp:2737
MOboolean Exists()
Definition: moFile.cpp:436
virtual bool InitGraph()
Graph initialization.
Definition: moGsGraph.cpp:1532
static moGBoolean cb_buffer_disconnected(moGPointer u_data)
Definition: moGsGraph.cpp:160
MOboolean m_WaitForFormat
Definition: moVideoGraph.h:282
moVideoFormat & GetVideoFormat()
Returns the device's video format.
Definition: moVideoGraph.h:400
virtual moCaptureDevices * UpdateCaptureDevices()
Updates the available video devices.
Definition: moGsGraph.cpp:1405
Class for handling text.
Definition: moText.h:75
MOuint m_Channels
Definition: moVideoGraph.h:271
#define MOlong
Definition: moTypes.h:391
static void cb_pad_added(moGstElement *decodebin2, moGstPad *pad, moGPointer u_data)
Definition: moGsGraph.cpp:666
void moGstBuffer
Definition: moGsGraph.h:65
MOuint m_BlueMask
Definition: moVideoGraph.h:213
void moGstPadProbeInfo
Definition: moGsGraph.h:75
virtual moCaptureDevices * LoadCaptureDevices()
Definition: moGsGraph.cpp:1159
const moText & GetLabelName() const
Returns the device's label name.
Definition: moVideoGraph.h:428
moText0 moText
Definition: moText.h:291
virtual MOulong GetDuration()
Total duration of the stream in milliseconds.
Definition: moGsGraph.cpp:3771
static void cb_handoff(moGstElement *fakesrc, moGstBuffer *buffer, moGstPad *pad, moGPointer user_data)
Definition: moGsGraph.cpp:841
moStreamState
Definition: moVideoGraph.h:140
void moGstElement
Definition: moGsGraph.h:62
void SetLabelName(const moText &p_labelname)
Sets the device's label name.
Definition: moVideoGraph.h:423
bool IsRunning()
Whether the graph is running.
Definition: moGsGraph.cpp:3831
GStreamer Graph Class.
Definition: moGsGraph.h:151
void Present(bool p=true)
Sets the device's presence.
Definition: moVideoGraph.h:413
const moText & GetName() const
Returns the device name.
Definition: moVideoGraph.h:380
void SetBrightness(float brightness)
Definition: moGsGraph.cpp:3883
moText0 & Right(MOuint)
Definition: moText.cpp:491
#define MOint
Definition: moTypes.h:388
const char * message
void moGMainContext
Definition: moGsGraph.h:73
void Pause()
Pauses video playback.
Definition: moGsGraph.cpp:3660
void SetEchoFeedback(float feedback)
Definition: moGsGraph.cpp:3876
bool BuildLiveWebcamGraph(moBucketsPool *pBucketsPool, moCaptureDevice &p_capdev)
Definition: moGsGraph.cpp:1981
void SetHue(float hue)
Definition: moGsGraph.cpp:3899
Manager of moBucket objects.
Definition: moBuckets.h:152
void WaitForFormatDefinition(MOulong timeout)
Definition: moGsGraph.cpp:2717
MOuint Length() const
Definition: moText.cpp:347
virtual ~moGsFramework()
Definition: moGsGraph.cpp:1085
int GetSourceWidth() const
Returns the width of the source image.
Definition: moVideoGraph.h:433
virtual bool AddCaptureDevice(moCaptureDevice &p_capdev)
Adds a video device.
Definition: moGsGraph.cpp:1423
void moGstCaps
Definition: moGsGraph.h:66
static moDebug * MODebug2
Error-reporting class for debugging.
Definition: moAbstract.h:225
static void cb_newpad(moGstElement *decodebin, moGstPad *pad, moGBoolean last, moGPointer u_data)
Definition: moGsGraph.cpp:473
Definition of a video device, generally a video capture device or camera.
Definition: moVideoGraph.h:336
static void on_rtsppadd_added(moGstElement *rtspsrc, moGstPad *pad, moGPointer u_data)
Definition: moGsGraph.cpp:411
Memory space for sharing data between objects.
Definition: moBuckets.h:53
void Push(moText p_text)
Pushes the message onto the message stack.
Definition: moAbstract.h:115
#define MO_INFINITE
Definition: moGsGraph.cpp:3668
MOulong GetFramesLength()
The number of frames, i.e. the length of the stream.
Definition: moGsGraph.cpp:3723
virtual MOulong GetPositionMS()
Definition: moGsGraph.cpp:3814
void SetVolume(float volume)
Definition: moGsGraph.cpp:3838
moGsGraph()
Constructor.
Definition: moGsGraph.cpp:1445
virtual moStreamState GetState()
Playback state.
Definition: moGsGraph.cpp:3553
void SetName(const moText &p_name)
Definition: moVideoGraph.h:375
void moGstPad
Definition: moGsGraph.h:63
bool BuildLiveQTVideoGraph(moText filename, moBucketsPool *pBucketsPool)
Definition: moGsGraph.cpp:2628
virtual bool IsEOS()
Definition: moGsGraph.cpp:1767
int moGstCallbackReturn
Definition: moGsGraph.h:74
void Play()
Reproducir el video.
Definition: moGsGraph.cpp:3641
bool BuildRecordGraph(moText filename, moBucketsPool *pBucketsPool)
Definition: moGsGraph.cpp:1854
MOuint m_SampleSize
Definition: moVideoGraph.h:274
#define MOuint
Definition: moTypes.h:387
LIBMOLDEO_API moText0 IntToStr(int a)
Definition: moText.cpp:1070
MObyte * GetFrameBuffer(MOlong *size)
Definition: moGsGraph.cpp:3916
MOubyte * GetBuffer()
Returns the pointer to the data buffer.
Definition: moBuckets.cpp:58
void SetSaturation(float saturation)
Definition: moGsGraph.cpp:3907
moBucket * RetreiveBucket()
Definition: moBuckets.cpp:205
MOuint m_BitCount
Definition: moVideoGraph.h:207
MOboolean m_WaitForFormat
Definition: moVideoGraph.h:210
MOuint m_FrameRate
Definition: moVideoGraph.h:209
void SetBuffer(MOlong size, MOubyte *pbuf)
Creates a memory space and copies the values from one pointer to another memory space...
Definition: moBuckets.cpp:87
void CopyVideoFrame(void *bufferdst, int size)
Definition: moGsGraph.cpp:1790
void SetEchoDelay(float delay)
Definition: moGsGraph.cpp:3861
void SetVideoFormat(moGstCaps *caps, moGstBuffer *buffer=NULL)
Definition: moGsGraph.cpp:3362
MOboolean m_bInitialized
Initialization value
Definition: moAbstract.h:223
int Find(const moText0 &target)
Splits the text by the specified character
Definition: moText.cpp:683
virtual MOulong GetPosition()
Definition: moGsGraph.cpp:3794
void BuildAudioFilters()
Definition: moGsGraph.cpp:2917
bool SetCaptureDevice(moText deviceport, MOint idevice=0)
Definition: moGsGraph.cpp:1780
long m_BusWatchId
Definition: moGsGraph.h:278
moVideoFormat m_VideoFormat
Definition: moVideoGraph.h:761
void Message(moText p_text)
Reports a message to the user and also stores it in the text log.
Definition: moAbstract.cpp:114
long cb_have_data_handler_id
Definition: moGsGraph.h:250
bool BuildLiveDVGraph(moBucketsPool *pBucketsPool, moCaptureDevice &p_capdev)
Definition: moGsGraph.cpp:1952
virtual bool FinishGraph()
Finalización del grafo.
Definition: moGsGraph.cpp:1606
#define MOubyte
Definition: moTypes.h:399
void SetBalance(float balance)
Fija el balance entre canal izquierdo y derecho en caso de haberlos.
Definition: moGsGraph.cpp:3845
moLock BuildLock
Definition: moGsGraph.cpp:44
bool AddBucket(moBucket *pBucket)
Definition: moBuckets.cpp:149
long signal_newpad_id
Definition: moGsGraph.h:277
long signal_rtsppad_added_id
Definition: moGsGraph.h:255