ARDrone SDK 1.8 added
[mardrone] / mardrone / ARDrone_SDK_Version_1_8_20110726 / Examples / Linux / Navigation / Sources / ihm / ihm_stages_o_gtk.c
1 /*
2  * @ihm_stages_o_gtk.c
3  * @author marc-olivier.dzeukou@parrot.com
4  * @date 2007/07/27
5  *
6  * ihm vision thread implementation
7  *
8  */
9 #include <stdio.h>
10 #include <stdlib.h>
11 #include <ctype.h>
12 #include <termios.h>
13 #include <fcntl.h>
14 #include <errno.h>
15 #include <unistd.h>
16
17 #include <gtk/gtk.h>
18 #include <gtk/gtkcontainer.h>
19 #include <sys/time.h>
20 #include <time.h>
21
22 #include <VP_Api/vp_api.h>
23 #include <VP_Api/vp_api_error.h>
24 #include <VP_Api/vp_api_stage.h>
25 #include <VP_Api/vp_api_picture.h>
26 #include <VP_Stages/vp_stages_io_file.h>
27 #ifdef USE_ELINUX
28 #include <VP_Stages/vp_stages_V4L2_i_camif.h>
29 #else
30 #include <VP_Stages/vp_stages_i_camif.h>
31 #endif
32
33 #include <VP_Os/vp_os_print.h>
34 #include <VP_Os/vp_os_malloc.h>
35 #include <VP_Os/vp_os_delay.h>
36 #include <VP_Stages/vp_stages_yuv2rgb.h>
37 #include <VP_Stages/vp_stages_buffer_to_picture.h>
38
39 #ifdef PC_USE_VISION
40 #     include <Vision/vision_draw.h>
41 #     include <Vision/vision_stage.h>
42 #endif
43
44 #include <config.h>
45
46 #ifdef JPEG_CAPTURE
47 #include <VP_Stages/vp_stages_io_jpeg.h>
48 #else
49 #include <VLIB/Stages/vlib_stage_decode.h>
50 #endif
51
52
53 #include <ardrone_tool/ardrone_tool.h>
54 #include <ardrone_tool/Com/config_com.h>
55
56 //#define USE_FFMPEG_RECORD
57
58 #ifndef RECORD_VIDEO
59 #define RECORD_VIDEO
60 #endif
61
62 #ifdef RECORD_VIDEO
63 #include <ardrone_tool/Video/video_stage_recorder.h>
64         #ifdef USE_FFMPEG_RECORDER
65         #include <ardrone_tool/Video/video_stage_ffmpeg_recorder.h>
66         #endif
67 #endif
68
69 #include <ardrone_tool/Video/video_com_stage.h>
70
71 #include "ihm/ihm.h"
72 #include "ihm/ihm_vision.h"
73 #include "ihm/ihm_stages_o_gtk.h"
74 #include "common/mobile_config.h"
75
76 #define NB_STAGES 10
77
78 #define FPS2TIME  0.10
79
80 #define CAMIF_V_CAMERA_USED CAMIF_CAMERA_CRESYN
81 #define CAMIF_H_CAMERA_USED CAMIF_CAMERA_OVTRULY
82
83 PIPELINE_HANDLE pipeline_handle;
84
85 extern GtkWidget *ihm_ImageWin, *ihm_ImageEntry[9], *ihm_ImageDA, *ihm_VideoStream_VBox;
86 /* For fullscreen video display */
87 extern GtkWindow *fullscreen_window;
88 extern GtkImage *fullscreen_image;
89 extern GdkScreen *fullscreen;
90
91 extern int tab_vision_config_params[10];
92 extern int vision_config_options;
93 extern int image_vision_window_view, image_vision_window_status;
94 extern char video_to_play[16];
95
96 static GtkImage *image = NULL;
97 static GdkPixbuf *pixbuf = NULL;
98
99 static int32_t   pixbuf_width      = 0;
100 static int32_t   pixbuf_height     = 0;
101 static int32_t   pixbuf_rowstride  = 0;
102 static uint8_t*  pixbuf_data       = NULL;
103
104
105
106 C_RESULT
107 output_gtk_stage_open( vp_stages_gtk_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
108 {
109   //  printf("In gtk stage open\n" );
110  /*pixbuf = gdk_pixbuf_new(GDK_COLORSPACE_RGB,
111                           FALSE             ,
112                           8                 ,
113                           cfg->width        ,
114                           cfg->height       );*/
115
116   return (SUCCESS);
117 }
118
119
120
121 void destroy_image_callback( GtkWidget *widget, gpointer data )
122 {
123         image=NULL;
124 }
125
126 extern GtkWidget * ihm_fullScreenFixedContainer;
127 extern GtkWidget * ihm_fullScreenHBox;
128
129 C_RESULT
130 output_gtk_stage_transform( vp_stages_gtk_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
131 {
132         vlib_stage_decoding_config_t* vec;
133
134         if (!ihm_is_initialized) return SUCCESS;
135         if (ihm_ImageWin==NULL) return SUCCESS;
136         if( image_vision_window_view != WINDOW_VISIBLE) return SUCCESS;
137
138         gdk_threads_enter(); //http://library.gnome.org/devel/gdk/stable/gdk-Threads.html
139
140         vec = (vlib_stage_decoding_config_t*)cfg->last_decoded_frame_info;
141
142           pixbuf_width     = vec->controller.width;
143           pixbuf_height    = vec->controller.height;
144           pixbuf_rowstride = cfg->rowstride;
145           pixbuf_data      = (uint8_t*)in->buffers[0];
146
147           //printf("Taille cfg : %i %i \n",cfg->max_width,cfg->max_height);
148
149   if(pixbuf!=NULL)
150   {
151          g_object_unref(pixbuf);
152          pixbuf=NULL;
153   }
154
155           pixbuf = gdk_pixbuf_new_from_data(pixbuf_data,
156                                                                                          GDK_COLORSPACE_RGB,
157                                                                                          FALSE,
158                                                                                          8,
159                                                                                          pixbuf_width,
160                                                                                          pixbuf_height,
161                                                                                          pixbuf_rowstride,
162                                                                                          NULL,
163                                                                                          NULL);
164
165           if (fullscreen!=NULL && fullscreen_window!=NULL)
166           {
167                   pixbuf = gdk_pixbuf_scale_simple ( pixbuf,
168                                                                                          gdk_screen_get_width ( fullscreen ),
169                                                                                          gdk_screen_get_height ( fullscreen ),
170                                                                                          /*GDK_INTERP_HYPER*/
171                                                                                          GDK_INTERP_BILINEAR) ;
172                   /*if (fullscreen_image == NULL)
173                   {
174                           fullscreen_image  = (GtkImage*) gtk_image_new_from_pixbuf( pixbuf );
175                           //if (fullscreen_image == NULL) { printf("Probleme.\n"); }
176                           //gtk_container_add( GTK_CONTAINER( fullscreen_window ), GTK_WIDGET(fullscreen_image) );
177                           gtk_fixed_put(ihm_fullScreenFixedContainer,fullscreen_image,0,0);
178                   }*/
179                   if (fullscreen_image != NULL)
180                   {
181                           gtk_image_set_from_pixbuf(fullscreen_image, pixbuf);
182                           //gtk_widget_show_all (GTK_WIDGET(fullscreen_window));
183                           gtk_widget_show (GTK_WIDGET(fullscreen_image));
184                           //gtk_widget_show(ihm_fullScreenHBox);
185                   }
186           }
187           else
188           {
189                   pixbuf = gdk_pixbuf_scale_simple ( pixbuf,
190                                                                                                          cfg->max_width,
191                                                                                                          cfg->max_height,
192                                                                                                          /*GDK_INTERP_HYPER*/
193                                                                                                          GDK_INTERP_BILINEAR) ;
194
195                   if( image == NULL && pixbuf!=NULL)
196                   {
197                                 image  = (GtkImage*) gtk_image_new_from_pixbuf( pixbuf );
198                                 gtk_signal_connect(GTK_OBJECT(image), "destroy", G_CALLBACK(destroy_image_callback), NULL );
199                                 if(GTK_IS_WIDGET(ihm_ImageWin))
200                                                 if (GTK_IS_WIDGET(ihm_VideoStream_VBox))
201                                                         gtk_container_add( GTK_CONTAINER( ihm_VideoStream_VBox ), (GtkWidget*)image );
202                   }
203                   if( image!=NULL && pixbuf!=NULL )
204                   {
205                          gtk_image_set_from_pixbuf(image, pixbuf);
206                   }
207                   gtk_widget_show_all( ihm_ImageWin );
208           }
209
210          gdk_threads_leave();
211          return (SUCCESS);
212 }
213
214
215 C_RESULT
216 output_gtk_stage_close( vp_stages_gtk_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
217 {
218   return (SUCCESS);
219 }
220
221
/* Stage callback table plugged into the vision pipelines below for GTK
   display.  First entry (presumably the stage message handler) is unused
   here — NULL; then open / transform / close. */
const vp_api_stage_funcs_t vp_stages_output_gtk_funcs =
{
  NULL,
  (vp_api_stage_open_t)output_gtk_stage_open,
  (vp_api_stage_transform_t)output_gtk_stage_transform,
  (vp_api_stage_close_t)output_gtk_stage_close
};
229
230 #ifdef PC_USE_VISION
231 static vp_os_mutex_t draw_trackers_update;
232 /*static*/ vp_stages_draw_trackers_config_t draw_trackers_cfg = { 0 };
233
234 void set_draw_trackers_config(vp_stages_draw_trackers_config_t* cfg)
235 {
236 void*v;
237   vp_os_mutex_lock( &draw_trackers_update );
238   v = draw_trackers_cfg.last_decoded_frame_info;
239   vp_os_memcpy( &draw_trackers_cfg, cfg, sizeof(draw_trackers_cfg) );
240   draw_trackers_cfg.last_decoded_frame_info = v;
241   vp_os_mutex_unlock( &draw_trackers_update );
242 }
243
244 C_RESULT draw_trackers_stage_open( vp_stages_draw_trackers_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
245 {
246   vp_os_mutex_lock( &draw_trackers_update );
247
248   int32_t i;
249   for( i = 0; i < NUM_MAX_SCREEN_POINTS; i++ )
250   {
251     cfg->locked[i] = C_OK;
252   }
253
254   PRINT("Draw trackers inited with %d trackers\n", cfg->num_points);
255
256   vp_os_mutex_unlock( &draw_trackers_update );
257
258   return (SUCCESS);
259 }
260
261 C_RESULT draw_trackers_stage_transform( vp_stages_draw_trackers_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
262 {
263   int32_t i;
264
265   vp_os_mutex_lock( &draw_trackers_update );
266
267   vp_api_picture_t *picture = (vp_api_picture_t *) in->buffers;
268
269   if( in->size > 0 )
270   {
271 #ifdef DEBUG
272     for(i = 0 ; i < cfg->num_points; i++)
273     {
274       int32_t dist;
275       uint8_t color;
276       screen_point_t point;
277
278       point    = cfg->points[i];
279 //       point.x += ACQ_WIDTH / 2;
280 //       point.y += ACQ_HEIGHT / 2;
281
282       if( point.x >= STREAM_WIDTH || point.x < 0 || point.y >= STREAM_HEIGHT || point.y < 0 )
283       {
284         PRINT("Bad point (%d,%d) received at index %d on %d points\n", point.x, point.y, i, cfg->num_points);
285         continue;
286       }
287
288       if( SUCCEED(cfg->locked[i]) )
289       {
290         dist  = 3;
291         color = 0;
292       }
293       else
294       {
295         dist  = 1;
296         color = 0xFF;
297       }
298
299       vision_trace_cross(&point, dist, color, picture);
300     }
301 #endif
302
303     for(i = 0 ; i < cfg->detected ; i++)
304     {
305       //uint32_t centerX,centerY;
306       uint32_t width,height;
307       screen_point_t center;
308       if (cfg->last_decoded_frame_info!=NULL){
309                   center.x = cfg->patch_center[i].x*cfg->last_decoded_frame_info->controller.width/1000;
310                   center.y = cfg->patch_center[i].y*cfg->last_decoded_frame_info->controller.height/1000;
311                   width  = cfg->width[i]*cfg->last_decoded_frame_info->controller.width/1000;
312                   height = cfg->height[i]*cfg->last_decoded_frame_info->controller.height/1000;
313
314                   width = min(2*center.x,width); width = min(2*(cfg->last_decoded_frame_info->controller.width-center.x),width) -1;
315                   height = min(2*center.y,height); width = min(2*(cfg->last_decoded_frame_info->controller.height-center.y),height) -1;
316
317                   vision_trace_colored_rectangle(&center, width, height, 0, 255, 128, picture);
318                   /*Stephane*/vision_trace_colored_rectangle(&center, width-2, height-2, 200, 128-80, 0, picture); // blue
319       }else{printf("Problem drawing rectangle.\n");}
320     }
321   }
322
323   vp_os_mutex_unlock( &draw_trackers_update );
324
325   out->size         = in->size;
326   out->indexBuffer  = in->indexBuffer;
327   out->buffers      = in->buffers;
328
329   out->status       = VP_API_STATUS_PROCESSING;
330
331   return (SUCCESS);
332 }
333
334 C_RESULT draw_trackers_stage_close( vp_stages_draw_trackers_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
335 {
336   return (SUCCESS);
337 }
338
/* Stage callback table for the tracker-overlay stage, plugged into the
   vision pipelines below.  First entry (presumably the stage message
   handler) is unused — NULL; then open / transform / close. */
const vp_api_stage_funcs_t draw_trackers_funcs =
{
  NULL,
  (vp_api_stage_open_t)draw_trackers_stage_open,
  (vp_api_stage_transform_t)draw_trackers_stage_transform,
  (vp_api_stage_close_t)draw_trackers_stage_close
};
346
347 #endif
348
349
350 #ifdef RAW_CAPTURE
351
352 #ifdef RECORD_VISION_DATA
353 #ifdef RECORD_VIDEO
354 extern char video_filename[];
355 #endif
356 static void save_vision_attitude(vision_attitude_t* vision_attitude, int32_t custom_data_size )
357 {
358 #ifdef RECORD_VIDEO
359   static FILE* fp = NULL;
360
361   if( fp == NULL )
362   {
363     char filename[VIDEO_FILENAME_LENGTH];
364     char* dot;
365
366     strcpy( filename, video_filename );
367     dot = strrchr( filename, '.' );
368     dot[1] = 'd';
369     dot[2] = 'a';
370     dot[3] = 't';
371     dot[4] = '\0';
372
373     fp = fopen(filename, "wb");
374   }
375
376   if( fp != NULL )
377   {
378     fwrite( vision_attitude, custom_data_size, 1, fp );
379     fflush( fp );
380   }
381 #endif
382 }
383 #endif
384
/*
 * Vision pipeline thread, RAW_CAPTURE flavour:
 *   TCP socket input -> buffer-to-picture -> [optional recorder]
 *   -> [optional tracker overlay] -> YUV->RGB -> GTK display.
 * Waits until the IHM image window is visible, then runs the pipeline
 * until ardrone_tool_exit() or a stage failure.
 */
DEFINE_THREAD_ROUTINE(ihm_stages_vision, data)
{
  int32_t i;
  C_RESULT res;

  vp_api_io_pipeline_t    pipeline;
  vp_api_io_data_t        out;
  vp_api_io_stage_t       stages[NB_STAGES];

  vp_api_picture_t picture;

  /* Per-stage configuration blocks; each stage keeps a pointer to its
     config, so these must stay alive for the thread's lifetime. */
  video_com_config_t                      icc;
  vp_stages_buffer_to_picture_config_t    bpc;
  vp_stages_yuv2rgb_config_t              yuv2rgbconf;
  vp_stages_gtk_config_t                  gtkconf;
  video_stage_recorder_config_t           vrc;

  /// Picture configuration
  picture.format        = PIX_FMT_YUV420P;

  picture.width         = V_ACQ_WIDTH;
  picture.height        = V_ACQ_HEIGHT;
  picture.framerate     = CAMIF_V_FRAMERATE_USED;

  /* YUV420: chroma planes are quarter-sized.
     NOTE(review): vp_os_malloc results are not checked before use —
     a failed allocation would crash in the loop below. */
  picture.y_buf   = vp_os_malloc( V_ACQ_WIDTH * V_ACQ_HEIGHT     );
  picture.cr_buf  = vp_os_malloc( V_ACQ_WIDTH * V_ACQ_HEIGHT / 4 );
  picture.cb_buf  = vp_os_malloc( V_ACQ_WIDTH * V_ACQ_HEIGHT / 4 );

  picture.y_line_size   = V_ACQ_WIDTH;
  picture.cb_line_size  = V_ACQ_WIDTH / 2;
  picture.cr_line_size  = V_ACQ_WIDTH / 2;

  /* Pre-fill chroma with 0x80 (neutral gray) for luma-only streams. */
  for(i = 0; i < V_ACQ_WIDTH * V_ACQ_HEIGHT/ 4; i++ )
  {
    picture.cr_buf[i] = 0x80;
    picture.cb_buf[i] = 0x80;
  }

  vp_os_memset(&icc,          0, sizeof( icc ));
  vp_os_memset(&bpc,          0, sizeof( bpc ));
  vp_os_memset(&yuv2rgbconf,  0, sizeof( yuv2rgbconf ));
  vp_os_memset(&gtkconf,      0, sizeof( gtkconf ));

  /* Raw capture streams over TCP. */
  icc.com                 = COM_VIDEO();
  icc.buffer_size         = 1024;
  icc.protocol            = VP_COM_TCP;
  COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);

  bpc.picture             = &picture;
  bpc.y_buffer_size       = picture.width*picture.height;
  bpc.y_blockline_size    = picture.width*CAMIF_BLOCKLINES; // each blockline have 16 lines
  bpc.y_current_size      = 0;
  bpc.num_frames          = 0;
  bpc.y_buf_ptr           = NULL;
#ifdef USE_VIDEO_YUV
  bpc.cr_buf_ptr          = NULL;
  bpc.cb_buf_ptr          = NULL;
#endif
#ifdef USE_VIDEO_YUV
  bpc.luma_only           = FALSE;
#else
  bpc.luma_only           = TRUE;
#endif // USE_VIDEO_YUV

#ifdef BLOCK_MODE
  bpc.block_mode_enable   = TRUE;
#else
  bpc.block_mode_enable   = FALSE;
#endif

#ifdef RECORD_VISION_DATA
  /* Side-channel attitude records are written by save_vision_attitude. */
  bpc.custom_data_size    = sizeof(vision_attitude_t);
  bpc.custom_data_handler = (custom_data_handler_cb)save_vision_attitude;
#else
  bpc.custom_data_size    = 0;
  bpc.custom_data_handler = 0;
#endif

  yuv2rgbconf.rgb_format = VP_STAGES_RGB_FORMAT_RGB24;
  if( CAMIF_H_CAMERA_USED == CAMIF_CAMERA_OVTRULY_UPSIDE_DOWN_ONE_BLOCKLINE_LESS )
    yuv2rgbconf.mode = VP_STAGES_YUV2RGB_MODE_UPSIDE_DOWN;

  /* NOTE(review): gtkconf is left zeroed here (no max_width/max_height,
     no rowstride, no last_decoded_frame_info), yet the GTK transform
     stage reads all of them — this RAW_CAPTURE path looks broken or
     unused; confirm before enabling RAW_CAPTURE. */

  vrc.fp = NULL;

  pipeline.nb_stages = 0;

  stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
  stages[pipeline.nb_stages].cfg     = (void *)&icc;
  stages[pipeline.nb_stages].funcs   = video_com_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void *)&bpc;
  stages[pipeline.nb_stages].funcs   = vp_stages_buffer_to_picture_funcs;

  pipeline.nb_stages++;

#ifdef RECORD_VIDEO
  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vrc;
  stages[pipeline.nb_stages].funcs   = video_recorder_funcs;

  pipeline.nb_stages++;
#endif // RECORD_VIDEO

#ifdef PC_USE_VISION
  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&draw_trackers_cfg;
  stages[pipeline.nb_stages].funcs   = draw_trackers_funcs;

  pipeline.nb_stages++;
#endif

  stages[pipeline.nb_stages].type    = VP_API_FILTER_YUV2RGB;
  stages[pipeline.nb_stages].cfg     = (void*)&yuv2rgbconf;
  stages[pipeline.nb_stages].funcs   = vp_stages_yuv2rgb_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_OUTPUT_SDL;
  stages[pipeline.nb_stages].cfg     = (vp_stages_gtk_config_t *)&gtkconf;
  stages[pipeline.nb_stages].funcs   = vp_stages_output_gtk_funcs;

  pipeline.nb_stages++;

  pipeline.stages = &stages[0];

  // Wait for ihm image window to be visible
  while( !ardrone_tool_exit() && image_vision_window_view != WINDOW_VISIBLE ) {
    vp_os_delay( 200 );
  }

  if( !ardrone_tool_exit() )
  {
    PRINT("\n   IHM stage vision thread initialisation\n\n");

    res = vp_api_open(&pipeline, &pipeline_handle);

    if( SUCCEED(res) )
    {
      int loop = SUCCESS;
      out.status = VP_API_STATUS_PROCESSING;

      /* Main pump: run the pipeline while the window is visible;
         a vp_api_run failure ends the thread. */
      while( !ardrone_tool_exit() && (loop == SUCCESS) )
      {
#ifdef ND_WRITE_TO_FILE
        num_picture_decoded = bpc.num_picture_decoded;
#endif
        if( image_vision_window_view == WINDOW_VISIBLE ) {
          if( SUCCEED(vp_api_run(&pipeline, &out)) ) {
            if( (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING) ) {
              loop = SUCCESS;
            }
          }
          else loop = -1; // Finish this thread
        }
        //vp_os_delay( 25 );
      }

      vp_api_close(&pipeline, &pipeline_handle);
    }
  }

  return (THREAD_RET)0;
}
551 #elif defined(JPEG_CAPTURE)
552 DEFINE_THREAD_ROUTINE(ihm_stages_vision, data)
553 {
554   C_RESULT res;
555
556   image = NULL;
557
558   vp_api_io_pipeline_t    pipeline;
559   vp_api_io_data_t        out;
560   vp_api_io_stage_t       stages[NB_STAGES];
561
562   vp_api_picture_t picture;
563
564   video_com_config_t              icc;
565   vp_stages_decoder_jpeg_config_t jdc;
566   vp_stages_yuv2rgb_config_t      yuv2rgbconf;
567   vp_stages_gtk_config_t          gtkconf;
568   video_stage_recorder_config_t   vrc;
569
570   /// Picture configuration
571   picture.format        = PIX_FMT_YUV420P;
572
573   picture.width         = STREAM_WIDTH;
574   picture.height        = STREAM_HEIGHT;
575   picture.framerate     = 30;
576
577   picture.y_buf   = vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT     );
578   picture.cr_buf  = vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT / 4 );
579   picture.cb_buf  = vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT / 4 );
580
581   picture.y_line_size   = STREAM_WIDTH;
582   picture.cb_line_size  = STREAM_WIDTH / 2;
583   picture.cr_line_size  = STREAM_WIDTH / 2;
584
585   vp_os_memset(&icc,          0, sizeof( icc ));
586   vp_os_memset(&jdc,          0, sizeof( jdc ));
587   vp_os_memset(&yuv2rgbconf,  0, sizeof( yuv2rgbconf ));
588   vp_os_memset(&gtkconf,      0, sizeof( gtkconf ));
589
590   icc.com                 = COM_VIDEO();
591   icc.buffer_size         = 8192;
592   icc.protocol            = VP_COM_UDP;
593   COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);
594
595
596   jdc.width                             = STREAM_WIDTH;
597   jdc.height                    = STREAM_HEIGHT;
598   jdc.dct_method                = JDCT_FLOAT;
599
600 /*  vec.width               = ACQ_WIDTH;
601   vec.height              = ACQ_HEIGHT;
602   vec.picture             = &picture;
603 #ifdef USE_VIDEO_YUV
604   vec.luma_only           = FALSE;
605 #else
606   vec.luma_only           = TRUE;
607 #endif // USE_VIDEO_YUV
608   vec.block_mode_enable   = TRUE;*/
609
610   yuv2rgbconf.rgb_format = VP_STAGES_RGB_FORMAT_RGB24;
611   if( CAMIF_CAMERA_USED == CAMIF_CAMERA_OVTRULY_UPSIDE_DOWN_ONE_BLOCKLINE_LESS )
612     yuv2rgbconf.mode = VP_STAGES_YUV2RGB_MODE_UPSIDE_DOWN;
613
614   gtkconf.width     = picture.width;
615   gtkconf.height    = picture.height;
616   gtkconf.rowstride = picture.width * 3;
617
618   vrc.fp = NULL;
619
620   pipeline.nb_stages = 0;
621
622   stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
623   stages[pipeline.nb_stages].cfg     = (void *)&icc;
624   stages[pipeline.nb_stages].funcs   = video_com_funcs;
625
626   pipeline.nb_stages++;
627
628   stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
629   stages[pipeline.nb_stages].cfg     = (void*)&jdc;
630   stages[pipeline.nb_stages].funcs   = vp_stages_decoder_jpeg_funcs;
631
632   pipeline.nb_stages++;
633
634 #ifdef RECORD_VIDEO
635   stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
636   stages[pipeline.nb_stages].cfg     = (void*)&vrc;
637   stages[pipeline.nb_stages].funcs   = video_recorder_funcs;
638
639   pipeline.nb_stages++;
640 #endif // RECORD_VIDEO
641
642   draw_trackers_cfg.last_decoded_frame_info = &vec;
643   stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
644   stages[pipeline.nb_stages].cfg     = (void*)&draw_trackers_cfg;
645   stages[pipeline.nb_stages].funcs   = draw_trackers_funcs;
646
647   pipeline.nb_stages++;
648
649   stages[pipeline.nb_stages].type    = VP_API_FILTER_YUV2RGB;
650   stages[pipeline.nb_stages].cfg     = (void*)&yuv2rgbconf;
651   stages[pipeline.nb_stages].funcs   = vp_stages_yuv2rgb_funcs;
652
653   pipeline.nb_stages++;
654
655   stages[pipeline.nb_stages].type    = VP_API_OUTPUT_SDL;
656   stages[pipeline.nb_stages].cfg     = (vp_stages_gtk_config_t *)&gtkconf;
657   stages[pipeline.nb_stages].funcs   = vp_stages_output_gtk_funcs;
658
659   pipeline.nb_stages++;
660
661   pipeline.stages = &stages[0];
662
663   // Wait for ihm image window to be visible
664   while( !ardrone_tool_exit() && image_vision_window_view != WINDOW_VISIBLE ) {
665     vp_os_delay( 200 );
666   }
667
668   if( !ardrone_tool_exit() )
669   {
670     PRINT("\n   IHM stage vision thread initialisation\n\n");
671
672     res = vp_api_open(&pipeline, &pipeline_handle);
673
674     if( SUCCEED(res) )
675     {
676       int loop = SUCCESS;
677       out.status = VP_API_STATUS_PROCESSING;
678
679       while( !ardrone_tool_exit() && (loop == SUCCESS) )
680       {
681 //#ifdef ND_WRITE_TO_FILE
682 //        num_picture_decoded = vec.num_picture_decoded;
683 //#endif
684         if( image_vision_window_view == WINDOW_VISIBLE ) {
685           if( SUCCEED(vp_api_run(&pipeline, &out)) ) {
686             if( (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING) ) {
687               loop = SUCCESS;
688             }
689           }
690           else loop = -1; // Finish this thread
691         }
692         // vp_os_delay( 25 );
693       }
694
695       vp_api_close(&pipeline, &pipeline_handle);
696     }
697   }
698
699   PRINT("   IHM stage vision thread ended\n\n");
700
701   return (THREAD_RET)0;
702 }
703
704 #else// RAW_CAPTURE
705 DEFINE_THREAD_ROUTINE(ihm_stages_vision, data)
706 {
707   C_RESULT res;
708
709   //image = NULL;
710
711   vp_api_io_pipeline_t    pipeline;
712   vp_api_io_data_t        out;
713   vp_api_io_stage_t       stages[NB_STAGES];
714
715   vp_api_picture_t picture;
716
717   video_com_config_t              icc;
718   vlib_stage_decoding_config_t    vec;
719   vp_stages_yuv2rgb_config_t      yuv2rgbconf;
720   vp_stages_gtk_config_t          gtkconf;
721 #ifdef RECORD_VIDEO
722   video_stage_recorder_config_t   vrc;
723 #ifdef USE_FFMPEG_RECORDER
724   video_stage_recorder_config_t   vrc_ffmpeg;
725 #endif
726 #endif
727   /// Picture configuration
728   picture.format        = PIX_FMT_YUV420P;
729   picture.width         = STREAM_WIDTH;
730   picture.height        = STREAM_HEIGHT;
731   picture.framerate     = 30;
732
733   picture.y_buf   = vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT     );
734   picture.cr_buf  = vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT / 4 );
735   picture.cb_buf  = vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT / 4 );
736
737   picture.y_line_size   = STREAM_WIDTH;
738   picture.cb_line_size  = STREAM_WIDTH / 2;
739   picture.cr_line_size  = STREAM_WIDTH / 2;
740
741   vp_os_memset(&icc,          0, sizeof( icc ));
742   vp_os_memset(&vec,          0, sizeof( vec ));
743   vp_os_memset(&yuv2rgbconf,  0, sizeof( yuv2rgbconf ));
744   vp_os_memset(&gtkconf,      0, sizeof( gtkconf ));
745
746   icc.com                 = COM_VIDEO();
747   icc.buffer_size         = 100000;
748   icc.protocol            = VP_COM_UDP;
749   COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);
750
751   vec.width               = STREAM_WIDTH;
752   vec.height              = STREAM_HEIGHT;
753   vec.picture             = &picture;
754 #ifdef USE_VIDEO_YUV
755   vec.luma_only           = FALSE;
756 #else
757   vec.luma_only           = TRUE;
758 #endif // USE_VIDEO_YUV
759   vec.block_mode_enable   = TRUE;
760
761   vec.luma_only           = FALSE;
762   yuv2rgbconf.rgb_format = VP_STAGES_RGB_FORMAT_RGB24;
763   if( CAMIF_H_CAMERA_USED == CAMIF_CAMERA_OVTRULY_UPSIDE_DOWN_ONE_BLOCKLINE_LESS )
764     yuv2rgbconf.mode = VP_STAGES_YUV2RGB_MODE_UPSIDE_DOWN;
765   gtkconf.max_width     = picture.width;
766   gtkconf.max_height    = picture.height;
767   gtkconf.rowstride = picture.width * 3;
768   gtkconf.last_decoded_frame_info = (void*)&vec;
769
770 #ifdef RECORD_VIDEO
771   vrc.fp = NULL;
772 #endif
773
774   pipeline.nb_stages = 0;
775
776   stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
777   stages[pipeline.nb_stages].cfg     = (void *)&icc;
778   stages[pipeline.nb_stages].funcs   = video_com_funcs;
779
780   pipeline.nb_stages++;
781
782   stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
783   stages[pipeline.nb_stages].cfg     = (void*)&vec;
784   stages[pipeline.nb_stages].funcs   = vlib_decoding_funcs;
785
786   pipeline.nb_stages++;
787
788 #ifdef RECORD_VIDEO
789 //#warning Recording video option enabled in Navigation.
790   stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
791   stages[pipeline.nb_stages].cfg     = (void*)&vrc;
792   stages[pipeline.nb_stages].funcs   = video_recorder_funcs;
793
794   pipeline.nb_stages++;
795
796 #ifdef USE_FFMPEG_RECORDER
797 //#warning FFMPEG Recording video option enabled in Navigation.
798   stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
799   stages[pipeline.nb_stages].cfg     = (void*)&vrc_ffmpeg;
800   stages[pipeline.nb_stages].funcs   = video_ffmpeg_recorder_funcs;
801
802   pipeline.nb_stages++;
803 #endif
804 #endif // RECORD_VIDEO
805
806
807 #ifdef PC_USE_VISION
808   draw_trackers_cfg.last_decoded_frame_info = &vec;
809   stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
810   stages[pipeline.nb_stages].cfg     = (void*)&draw_trackers_cfg;
811   stages[pipeline.nb_stages].funcs   = draw_trackers_funcs;
812
813   pipeline.nb_stages++;
814 #endif
815
816   stages[pipeline.nb_stages].type    = VP_API_FILTER_YUV2RGB;
817   stages[pipeline.nb_stages].cfg     = (void*)&yuv2rgbconf;
818   stages[pipeline.nb_stages].funcs   = vp_stages_yuv2rgb_funcs;
819
820   pipeline.nb_stages++;
821
822   stages[pipeline.nb_stages].type    = VP_API_OUTPUT_SDL;
823   stages[pipeline.nb_stages].cfg     = (void*)&gtkconf;
824   stages[pipeline.nb_stages].funcs   = vp_stages_output_gtk_funcs;
825
826   pipeline.nb_stages++;
827
828   pipeline.stages = &stages[0];
829
830   // Wait for ihm image window to be visible
831   while( !ardrone_tool_exit() && image_vision_window_view != WINDOW_VISIBLE ) {
832     vp_os_delay( 200 );
833   }
834
835   if( !ardrone_tool_exit() )
836   {
837     PRINT("\n   IHM stage vision thread initialisation\n\n");
838
839     res = vp_api_open(&pipeline, &pipeline_handle);
840
841     if( SUCCEED(res) )
842     {
843       int loop = SUCCESS;
844       out.status = VP_API_STATUS_PROCESSING;
845
846       while( !ardrone_tool_exit() && (loop == SUCCESS) )
847       {
848 #ifdef ND_WRITE_TO_FILE
849         num_picture_decoded = vec.num_picture_decoded;
850 #endif
851         if( image_vision_window_view == WINDOW_VISIBLE ) {
852           if( SUCCEED(vp_api_run(&pipeline, &out)) ) {
853             if( (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING) ) {
854               loop = SUCCESS;
855             }
856           }
857           else loop = -1; // Finish this thread
858         }
859         // vp_os_delay( 25 );
860       }
861
862       vp_api_close(&pipeline, &pipeline_handle);
863     }
864   }
865
866   PRINT("   IHM stage vision thread ended\n\n");
867
868   return (THREAD_RET)0;
869 }
870 #endif // RAW_CAPTURE