3 * @author marc-olivier.dzeukou@parrot.com
6 * ihm vision thread implementation
18 #include <gtk/gtkcontainer.h>
22 #include <VP_Api/vp_api.h>
23 #include <VP_Api/vp_api_error.h>
24 #include <VP_Api/vp_api_stage.h>
25 #include <VP_Api/vp_api_picture.h>
26 #include <VP_Stages/vp_stages_io_file.h>
28 #include <VP_Stages/vp_stages_V4L2_i_camif.h>
30 #include <VP_Stages/vp_stages_i_camif.h>
33 #include <VP_Os/vp_os_print.h>
34 #include <VP_Os/vp_os_malloc.h>
35 #include <VP_Os/vp_os_delay.h>
36 #include <VP_Stages/vp_stages_yuv2rgb.h>
37 #include <VP_Stages/vp_stages_buffer_to_picture.h>
40 # include <Vision/vision_draw.h>
41 # include <Vision/vision_stage.h>
47 #include <VP_Stages/vp_stages_io_jpeg.h>
49 #include <VLIB/Stages/vlib_stage_decode.h>
53 #include <ardrone_tool/ardrone_tool.h>
54 #include <ardrone_tool/Com/config_com.h>
56 //#define USE_FFMPEG_RECORDER  /* name fixed to match the #ifdef USE_FFMPEG_RECORDER guards below */
63 #include <ardrone_tool/Video/video_stage_recorder.h>
64 #ifdef USE_FFMPEG_RECORDER
65 #include <ardrone_tool/Video/video_stage_ffmpeg_recorder.h>
69 #include <ardrone_tool/Video/video_com_stage.h>
72 #include "ihm/ihm_vision.h"
73 #include "ihm/ihm_stages_o_gtk.h"
74 #include "common/mobile_config.h"
80 #define CAMIF_V_CAMERA_USED CAMIF_CAMERA_CRESYN
81 #define CAMIF_H_CAMERA_USED CAMIF_CAMERA_OVTRULY
83 PIPELINE_HANDLE pipeline_handle;
85 extern GtkWidget *ihm_ImageWin, *ihm_ImageEntry[9], *ihm_ImageDA, *ihm_VideoStream_VBox;
86 /* For fullscreen video display */
87 extern GtkWindow *fullscreen_window;
88 extern GtkImage *fullscreen_image;
89 extern GdkScreen *fullscreen;
91 extern int tab_vision_config_params[10];
92 extern int vision_config_options;
93 extern int image_vision_window_view, image_vision_window_status;
94 extern char video_to_play[16];
96 static GtkImage *image = NULL;
97 static GdkPixbuf *pixbuf = NULL;
99 static int32_t pixbuf_width = 0;
100 static int32_t pixbuf_height = 0;
101 static int32_t pixbuf_rowstride = 0;
102 static uint8_t* pixbuf_data = NULL;
/**
 * "Open" hook of the GTK output stage (vp_api stage interface).
 * NOTE(review): only a fragment of the body is visible in this extract; the
 * visible code is commented-out pixbuf allocation — presumably the pixbuf is
 * created lazily in the transform stage instead. Confirm against full source.
 */
107 output_gtk_stage_open( vp_stages_gtk_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
109 // printf("In gtk stage open\n" );
110 /*pixbuf = gdk_pixbuf_new(GDK_COLORSPACE_RGB,
/* GTK "destroy" signal handler for the video GtkImage widget; connected in
 * output_gtk_stage_transform. Body not visible in this extract. */
121 void destroy_image_callback( GtkWidget *widget, gpointer data )
/* Containers for the fullscreen video view, defined in the IHM module. */
126 extern GtkWidget * ihm_fullScreenFixedContainer;
127 extern GtkWidget * ihm_fullScreenHBox;
/**
 * "Transform" hook of the GTK output stage: takes the RGB frame produced by
 * the yuv2rgb stage (in->buffers[0]), wraps it in a GdkPixbuf and pushes it
 * into either the fullscreen GtkImage or the windowed GtkImage.
 * Returns SUCCESS immediately (frame dropped) when the IHM is not
 * initialised or the video window is not visible.
 * NOTE(review): several interior lines (pixbuf_new_from_data arguments,
 * scale targets, gdk_threads_leave, braces) are missing from this extract.
 */
130 output_gtk_stage_transform( vp_stages_gtk_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
132 vlib_stage_decoding_config_t* vec;
/* Guard clauses: drop the frame when there is nothing to draw on. */
134 if (!ihm_is_initialized) return SUCCESS;
135 if (ihm_ImageWin==NULL) return SUCCESS;
136 if( image_vision_window_view != WINDOW_VISIBLE) return SUCCESS;
/* All GTK/GDK calls below must hold the GDK lock (stage runs on the
 * pipeline thread, not the GTK main thread). */
138 gdk_threads_enter(); //http://library.gnome.org/devel/gdk/stable/gdk-Threads.html
/* Actual decoded-frame dimensions come from the decoder's config. */
140 vec = (vlib_stage_decoding_config_t*)cfg->last_decoded_frame_info;
142 pixbuf_width = vec->controller.width;
143 pixbuf_height = vec->controller.height;
144 pixbuf_rowstride = cfg->rowstride;
145 pixbuf_data = (uint8_t*)in->buffers[0];
147 //printf("Taille cfg : %i %i \n",cfg->max_width,cfg->max_height);
/* Release the previous frame's pixbuf before wrapping the new buffer. */
151 g_object_unref(pixbuf);
155 pixbuf = gdk_pixbuf_new_from_data(pixbuf_data,
/* Fullscreen path: scale the frame to the screen dimensions. */
165 if (fullscreen!=NULL && fullscreen_window!=NULL)
167 pixbuf = gdk_pixbuf_scale_simple ( pixbuf,
168 gdk_screen_get_width ( fullscreen ),
169 gdk_screen_get_height ( fullscreen ),
171 GDK_INTERP_BILINEAR) ;
172 /*if (fullscreen_image == NULL)
174 fullscreen_image = (GtkImage*) gtk_image_new_from_pixbuf( pixbuf );
175 //if (fullscreen_image == NULL) { printf("Probleme.\n"); }
176 //gtk_container_add( GTK_CONTAINER( fullscreen_window ), GTK_WIDGET(fullscreen_image) );
177 gtk_fixed_put(ihm_fullScreenFixedContainer,fullscreen_image,0,0);
178 (review: line above is inside the commented-out block started at 172 — confirm)
179 if (fullscreen_image != NULL)
181 gtk_image_set_from_pixbuf(fullscreen_image, pixbuf);
182 //gtk_widget_show_all (GTK_WIDGET(fullscreen_window));
183 gtk_widget_show (GTK_WIDGET(fullscreen_image));
184 //gtk_widget_show(ihm_fullScreenHBox);
/* Windowed path: scale (target size lines not visible here) and display. */
189 pixbuf = gdk_pixbuf_scale_simple ( pixbuf,
193 GDK_INTERP_BILINEAR) ;
/* Lazily create the GtkImage on first frame and attach it to the vbox. */
195 if( image == NULL && pixbuf!=NULL)
197 image = (GtkImage*) gtk_image_new_from_pixbuf( pixbuf );
198 gtk_signal_connect(GTK_OBJECT(image), "destroy", G_CALLBACK(destroy_image_callback), NULL );
199 if(GTK_IS_WIDGET(ihm_ImageWin))
200 if (GTK_IS_WIDGET(ihm_VideoStream_VBox))
201 gtk_container_add( GTK_CONTAINER( ihm_VideoStream_VBox ), (GtkWidget*)image );
203 if( image!=NULL && pixbuf!=NULL )
205 gtk_image_set_from_pixbuf(image, pixbuf);
207 gtk_widget_show_all( ihm_ImageWin );
/* "Close" hook of the GTK output stage (body not visible in this extract). */
216 output_gtk_stage_close( vp_stages_gtk_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
/* Function table registering the GTK output stage with the vp_api pipeline. */
222 const vp_api_stage_funcs_t vp_stages_output_gtk_funcs =
225 (vp_api_stage_open_t)output_gtk_stage_open,
226 (vp_api_stage_transform_t)output_gtk_stage_transform,
227 (vp_api_stage_close_t)output_gtk_stage_close
/* Mutex serialising access to draw_trackers_cfg between the vision thread
 * (set_draw_trackers_config) and the pipeline thread (draw_trackers stages). */
231 static vp_os_mutex_t draw_trackers_update;
232 /*static*/ vp_stages_draw_trackers_config_t draw_trackers_cfg = { 0 };
/**
 * Atomically replaces the shared tracker-drawing configuration with *cfg,
 * while preserving the existing last_decoded_frame_info pointer — that field
 * is owned by the pipeline (it points at the decoder config) and must not be
 * overwritten by the caller's copy.
 */
234 void set_draw_trackers_config(vp_stages_draw_trackers_config_t* cfg)
237 vp_os_mutex_lock( &draw_trackers_update );
238 v = draw_trackers_cfg.last_decoded_frame_info;
239 vp_os_memcpy( &draw_trackers_cfg, cfg, sizeof(draw_trackers_cfg) );
240 draw_trackers_cfg.last_decoded_frame_info = v;
241 vp_os_mutex_unlock( &draw_trackers_update );
/**
 * "Open" hook of the tracker-overlay stage: marks every screen-point slot
 * as available (C_OK) under the shared config mutex.
 */
244 C_RESULT draw_trackers_stage_open( vp_stages_draw_trackers_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
246 vp_os_mutex_lock( &draw_trackers_update );
249 for( i = 0; i < NUM_MAX_SCREEN_POINTS; i++ )
251 cfg->locked[i] = C_OK;
254 PRINT("Draw trackers inited with %d trackers\n", cfg->num_points);
256 vp_os_mutex_unlock( &draw_trackers_update );
/**
 * "Transform" hook of the tracker-overlay stage: draws tracked screen points
 * (crosses) and detected patches (rectangles) directly into the YUV picture,
 * then passes the buffer through unchanged.
 * Fixes in this revision:
 *  - restored `&center` (was mojibake `¢er` from an HTML-entity decode);
 *  - line 315: the second clamp assigned `width` instead of `height`,
 *    clobbering the already-clamped width and leaving height unclamped.
 */
261 C_RESULT draw_trackers_stage_transform( vp_stages_draw_trackers_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
265 vp_os_mutex_lock( &draw_trackers_update );
267 vp_api_picture_t *picture = (vp_api_picture_t *) in->buffers;
/* Pass 1: draw a cross for each tracked screen point. */
272 for(i = 0 ; i < cfg->num_points; i++)
276 screen_point_t point;
278 point = cfg->points[i];
279 // point.x += ACQ_WIDTH / 2;
280 // point.y += ACQ_HEIGHT / 2;
/* Reject points outside the stream; they would corrupt the picture buffer. */
282 if( point.x >= STREAM_WIDTH || point.x < 0 || point.y >= STREAM_HEIGHT || point.y < 0 )
284 PRINT("Bad point (%d,%d) received at index %d on %d points\n", point.x, point.y, i, cfg->num_points);
288 if( SUCCEED(cfg->locked[i]) )
299 vision_trace_cross(&point, dist, color, picture);
/* Pass 2: draw a double rectangle around each detected patch. Patch
 * coordinates arrive normalised to 0..1000 and are scaled to the actual
 * decoded frame size taken from last_decoded_frame_info. */
303 for(i = 0 ; i < cfg->detected ; i++)
305 //uint32_t centerX,centerY;
306 uint32_t width,height;
307 screen_point_t center;
308 if (cfg->last_decoded_frame_info!=NULL){
309 center.x = cfg->patch_center[i].x*cfg->last_decoded_frame_info->controller.width/1000;
310 center.y = cfg->patch_center[i].y*cfg->last_decoded_frame_info->controller.height/1000;
311 width = cfg->width[i]*cfg->last_decoded_frame_info->controller.width/1000;
312 height = cfg->height[i]*cfg->last_decoded_frame_info->controller.height/1000;
/* Clamp the rectangle so it stays inside the frame on both sides of center. */
314 width = min(2*center.x,width); width = min(2*(cfg->last_decoded_frame_info->controller.width-center.x),width) -1;
315 height = min(2*center.y,height); height = min(2*(cfg->last_decoded_frame_info->controller.height-center.y),height) -1;
317 vision_trace_colored_rectangle(&center, width, height, 0, 255, 128, picture);
318 /*Stephane*/vision_trace_colored_rectangle(&center, width-2, height-2, 200, 128-80, 0, picture); // blue
319 }else{printf("Problem drawing rectangle.\n");}
323 vp_os_mutex_unlock( &draw_trackers_update );
/* Pass-through: output is the (annotated) input buffer. */
325 out->size = in->size;
326 out->indexBuffer = in->indexBuffer;
327 out->buffers = in->buffers;
329 out->status = VP_API_STATUS_PROCESSING;
/* "Close" hook of the tracker-overlay stage (body not visible here). */
334 C_RESULT draw_trackers_stage_close( vp_stages_draw_trackers_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
/* Function table registering the tracker-overlay stage with the pipeline. */
339 const vp_api_stage_funcs_t draw_trackers_funcs =
342 (vp_api_stage_open_t)draw_trackers_stage_open,
343 (vp_api_stage_transform_t)draw_trackers_stage_transform,
344 (vp_api_stage_close_t)draw_trackers_stage_close
352 #ifdef RECORD_VISION_DATA
354 extern char video_filename[];
/**
 * Callback handed to the buffer-to-picture stage (custom_data_handler):
 * appends each frame's vision attitude record to a file derived from the
 * current video filename (extension swapped — replacement not visible here).
 * NOTE(review): `fp` is a lazily-opened static; fopen/fwrite results are not
 * checked and the file does not appear to be closed in this extract — confirm
 * a close path exists in the full source.
 */
356 static void save_vision_attitude(vision_attitude_t* vision_attitude, int32_t custom_data_size )
359 static FILE* fp = NULL;
363 char filename[VIDEO_FILENAME_LENGTH];
366 strcpy( filename, video_filename );
367 dot = strrchr( filename, '.' );
373 fp = fopen(filename, "wb");
378 fwrite( vision_attitude, custom_data_size, 1, fp );
385 DEFINE_THREAD_ROUTINE(ihm_stages_vision, data)
390 vp_api_io_pipeline_t pipeline;
391 vp_api_io_data_t out;
392 vp_api_io_stage_t stages[NB_STAGES];
394 vp_api_picture_t picture;
396 video_com_config_t icc;
397 vp_stages_buffer_to_picture_config_t bpc;
398 vp_stages_yuv2rgb_config_t yuv2rgbconf;
399 vp_stages_gtk_config_t gtkconf;
400 video_stage_recorder_config_t vrc;
402 /// Picture configuration
403 picture.format = PIX_FMT_YUV420P;
405 picture.width = V_ACQ_WIDTH;
406 picture.height = V_ACQ_HEIGHT;
407 picture.framerate = CAMIF_V_FRAMERATE_USED;
409 picture.y_buf = vp_os_malloc( V_ACQ_WIDTH * V_ACQ_HEIGHT );
410 picture.cr_buf = vp_os_malloc( V_ACQ_WIDTH * V_ACQ_HEIGHT / 4 );
411 picture.cb_buf = vp_os_malloc( V_ACQ_WIDTH * V_ACQ_HEIGHT / 4 );
413 picture.y_line_size = V_ACQ_WIDTH;
414 picture.cb_line_size = V_ACQ_WIDTH / 2;
415 picture.cr_line_size = V_ACQ_WIDTH / 2;
417 for(i = 0; i < V_ACQ_WIDTH * V_ACQ_HEIGHT/ 4; i++ )
419 picture.cr_buf[i] = 0x80;
420 picture.cb_buf[i] = 0x80;
423 vp_os_memset(&icc, 0, sizeof( icc ));
424 vp_os_memset(&bpc, 0, sizeof( bpc ));
425 vp_os_memset(&yuv2rgbconf, 0, sizeof( yuv2rgbconf ));
426 vp_os_memset(>kconf, 0, sizeof( gtkconf ));
428 icc.com = COM_VIDEO();
429 icc.buffer_size = 1024;
430 icc.protocol = VP_COM_TCP;
431 COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);
433 bpc.picture = &picture;
434 bpc.y_buffer_size = picture.width*picture.height;
435 bpc.y_blockline_size = picture.width*CAMIF_BLOCKLINES; // each blockline have 16 lines
436 bpc.y_current_size = 0;
438 bpc.y_buf_ptr = NULL;
440 bpc.cr_buf_ptr = NULL;
441 bpc.cb_buf_ptr = NULL;
444 bpc.luma_only = FALSE;
446 bpc.luma_only = TRUE;
447 #endif // USE_VIDEO_YUV
450 bpc.block_mode_enable = TRUE;
452 bpc.block_mode_enable = FALSE;
455 #ifdef RECORD_VISION_DATA
456 bpc.custom_data_size = sizeof(vision_attitude_t);
457 bpc.custom_data_handler = (custom_data_handler_cb)save_vision_attitude;
459 bpc.custom_data_size = 0;
460 bpc.custom_data_handler = 0;
463 yuv2rgbconf.rgb_format = VP_STAGES_RGB_FORMAT_RGB24;
464 if( CAMIF_H_CAMERA_USED == CAMIF_CAMERA_OVTRULY_UPSIDE_DOWN_ONE_BLOCKLINE_LESS )
465 yuv2rgbconf.mode = VP_STAGES_YUV2RGB_MODE_UPSIDE_DOWN;
469 pipeline.nb_stages = 0;
471 stages[pipeline.nb_stages].type = VP_API_INPUT_SOCKET;
472 stages[pipeline.nb_stages].cfg = (void *)&icc;
473 stages[pipeline.nb_stages].funcs = video_com_funcs;
475 pipeline.nb_stages++;
477 stages[pipeline.nb_stages].type = VP_API_FILTER_DECODER;
478 stages[pipeline.nb_stages].cfg = (void *)&bpc;
479 stages[pipeline.nb_stages].funcs = vp_stages_buffer_to_picture_funcs;
481 pipeline.nb_stages++;
484 stages[pipeline.nb_stages].type = VP_API_FILTER_DECODER;
485 stages[pipeline.nb_stages].cfg = (void*)&vrc;
486 stages[pipeline.nb_stages].funcs = video_recorder_funcs;
488 pipeline.nb_stages++;
489 #endif // RECORD_VIDEO
492 stages[pipeline.nb_stages].type = VP_API_FILTER_DECODER;
493 stages[pipeline.nb_stages].cfg = (void*)&draw_trackers_cfg;
494 stages[pipeline.nb_stages].funcs = draw_trackers_funcs;
496 pipeline.nb_stages++;
499 stages[pipeline.nb_stages].type = VP_API_FILTER_YUV2RGB;
500 stages[pipeline.nb_stages].cfg = (void*)&yuv2rgbconf;
501 stages[pipeline.nb_stages].funcs = vp_stages_yuv2rgb_funcs;
503 pipeline.nb_stages++;
505 stages[pipeline.nb_stages].type = VP_API_OUTPUT_SDL;
506 stages[pipeline.nb_stages].cfg = (vp_stages_gtk_config_t *)>kconf;
507 stages[pipeline.nb_stages].funcs = vp_stages_output_gtk_funcs;
509 pipeline.nb_stages++;
511 pipeline.stages = &stages[0];
513 // Wait for ihm image window to be visible
514 while( !ardrone_tool_exit() && image_vision_window_view != WINDOW_VISIBLE ) {
518 if( !ardrone_tool_exit() )
520 PRINT("\n IHM stage vision thread initialisation\n\n");
522 res = vp_api_open(&pipeline, &pipeline_handle);
527 out.status = VP_API_STATUS_PROCESSING;
529 while( !ardrone_tool_exit() && (loop == SUCCESS) )
531 #ifdef ND_WRITE_TO_FILE
532 num_picture_decoded = bpc.num_picture_decoded;
534 if( image_vision_window_view == WINDOW_VISIBLE ) {
535 if( SUCCEED(vp_api_run(&pipeline, &out)) ) {
536 if( (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING) ) {
540 else loop = -1; // Finish this thread
545 vp_api_close(&pipeline, &pipeline_handle);
549 return (THREAD_RET)0;
551 #elif defined(JPEG_CAPTURE)
552 DEFINE_THREAD_ROUTINE(ihm_stages_vision, data)
558 vp_api_io_pipeline_t pipeline;
559 vp_api_io_data_t out;
560 vp_api_io_stage_t stages[NB_STAGES];
562 vp_api_picture_t picture;
564 video_com_config_t icc;
565 vp_stages_decoder_jpeg_config_t jdc;
566 vp_stages_yuv2rgb_config_t yuv2rgbconf;
567 vp_stages_gtk_config_t gtkconf;
568 video_stage_recorder_config_t vrc;
570 /// Picture configuration
571 picture.format = PIX_FMT_YUV420P;
573 picture.width = STREAM_WIDTH;
574 picture.height = STREAM_HEIGHT;
575 picture.framerate = 30;
577 picture.y_buf = vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT );
578 picture.cr_buf = vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT / 4 );
579 picture.cb_buf = vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT / 4 );
581 picture.y_line_size = STREAM_WIDTH;
582 picture.cb_line_size = STREAM_WIDTH / 2;
583 picture.cr_line_size = STREAM_WIDTH / 2;
585 vp_os_memset(&icc, 0, sizeof( icc ));
586 vp_os_memset(&jdc, 0, sizeof( jdc ));
587 vp_os_memset(&yuv2rgbconf, 0, sizeof( yuv2rgbconf ));
588 vp_os_memset(>kconf, 0, sizeof( gtkconf ));
590 icc.com = COM_VIDEO();
591 icc.buffer_size = 8192;
592 icc.protocol = VP_COM_UDP;
593 COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);
596 jdc.width = STREAM_WIDTH;
597 jdc.height = STREAM_HEIGHT;
598 jdc.dct_method = JDCT_FLOAT;
600 /* vec.width = ACQ_WIDTH;
601 vec.height = ACQ_HEIGHT;
602 vec.picture = &picture;
604 vec.luma_only = FALSE;
606 vec.luma_only = TRUE;
607 #endif // USE_VIDEO_YUV
608 vec.block_mode_enable = TRUE;*/
610 yuv2rgbconf.rgb_format = VP_STAGES_RGB_FORMAT_RGB24;
611 if( CAMIF_CAMERA_USED == CAMIF_CAMERA_OVTRULY_UPSIDE_DOWN_ONE_BLOCKLINE_LESS )
612 yuv2rgbconf.mode = VP_STAGES_YUV2RGB_MODE_UPSIDE_DOWN;
614 gtkconf.width = picture.width;
615 gtkconf.height = picture.height;
616 gtkconf.rowstride = picture.width * 3;
620 pipeline.nb_stages = 0;
622 stages[pipeline.nb_stages].type = VP_API_INPUT_SOCKET;
623 stages[pipeline.nb_stages].cfg = (void *)&icc;
624 stages[pipeline.nb_stages].funcs = video_com_funcs;
626 pipeline.nb_stages++;
628 stages[pipeline.nb_stages].type = VP_API_FILTER_DECODER;
629 stages[pipeline.nb_stages].cfg = (void*)&jdc;
630 stages[pipeline.nb_stages].funcs = vp_stages_decoder_jpeg_funcs;
632 pipeline.nb_stages++;
635 stages[pipeline.nb_stages].type = VP_API_FILTER_DECODER;
636 stages[pipeline.nb_stages].cfg = (void*)&vrc;
637 stages[pipeline.nb_stages].funcs = video_recorder_funcs;
639 pipeline.nb_stages++;
640 #endif // RECORD_VIDEO
642 draw_trackers_cfg.last_decoded_frame_info = &vec;
643 stages[pipeline.nb_stages].type = VP_API_FILTER_DECODER;
644 stages[pipeline.nb_stages].cfg = (void*)&draw_trackers_cfg;
645 stages[pipeline.nb_stages].funcs = draw_trackers_funcs;
647 pipeline.nb_stages++;
649 stages[pipeline.nb_stages].type = VP_API_FILTER_YUV2RGB;
650 stages[pipeline.nb_stages].cfg = (void*)&yuv2rgbconf;
651 stages[pipeline.nb_stages].funcs = vp_stages_yuv2rgb_funcs;
653 pipeline.nb_stages++;
655 stages[pipeline.nb_stages].type = VP_API_OUTPUT_SDL;
656 stages[pipeline.nb_stages].cfg = (vp_stages_gtk_config_t *)>kconf;
657 stages[pipeline.nb_stages].funcs = vp_stages_output_gtk_funcs;
659 pipeline.nb_stages++;
661 pipeline.stages = &stages[0];
663 // Wait for ihm image window to be visible
664 while( !ardrone_tool_exit() && image_vision_window_view != WINDOW_VISIBLE ) {
668 if( !ardrone_tool_exit() )
670 PRINT("\n IHM stage vision thread initialisation\n\n");
672 res = vp_api_open(&pipeline, &pipeline_handle);
677 out.status = VP_API_STATUS_PROCESSING;
679 while( !ardrone_tool_exit() && (loop == SUCCESS) )
681 //#ifdef ND_WRITE_TO_FILE
682 // num_picture_decoded = vec.num_picture_decoded;
684 if( image_vision_window_view == WINDOW_VISIBLE ) {
685 if( SUCCEED(vp_api_run(&pipeline, &out)) ) {
686 if( (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING) ) {
690 else loop = -1; // Finish this thread
692 // vp_os_delay( 25 );
695 vp_api_close(&pipeline, &pipeline_handle);
699 PRINT(" IHM stage vision thread ended\n\n");
701 return (THREAD_RET)0;
/**
 * Vision thread (default vlib build): pipeline is
 *   socket(UDP) -> vlib decode -> [recorder(s)] -> draw_trackers -> yuv2rgb -> GTK.
 * Fix in this revision: restored `&gtkconf` at lines 744 and 823 (was
 * mojibake `>kconf` from an HTML-entity decode; compare the intact
 * `&icc`/`&vec` on the neighbouring memset lines).
 */
705 DEFINE_THREAD_ROUTINE(ihm_stages_vision, data)
711 vp_api_io_pipeline_t pipeline;
712 vp_api_io_data_t out;
713 vp_api_io_stage_t stages[NB_STAGES];
715 vp_api_picture_t picture;
717 video_com_config_t icc;
718 vlib_stage_decoding_config_t vec;
719 vp_stages_yuv2rgb_config_t yuv2rgbconf;
720 vp_stages_gtk_config_t gtkconf;
722 video_stage_recorder_config_t vrc;
723 #ifdef USE_FFMPEG_RECORDER
724 video_stage_recorder_config_t vrc_ffmpeg;
727 /// Picture configuration
728 picture.format = PIX_FMT_YUV420P;
729 picture.width = STREAM_WIDTH;
730 picture.height = STREAM_HEIGHT;
731 picture.framerate = 30;
733 picture.y_buf = vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT );
734 picture.cr_buf = vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT / 4 );
735 picture.cb_buf = vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT / 4 );
737 picture.y_line_size = STREAM_WIDTH;
738 picture.cb_line_size = STREAM_WIDTH / 2;
739 picture.cr_line_size = STREAM_WIDTH / 2;
741 vp_os_memset(&icc, 0, sizeof( icc ));
742 vp_os_memset(&vec, 0, sizeof( vec ));
743 vp_os_memset(&yuv2rgbconf, 0, sizeof( yuv2rgbconf ));
744 vp_os_memset(&gtkconf, 0, sizeof( gtkconf ));
/* Input socket: UDP video stream from the drone. */
746 icc.com = COM_VIDEO();
747 icc.buffer_size = 100000;
748 icc.protocol = VP_COM_UDP;
749 COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);
/* vlib decoder configuration; decodes into `picture`. */
751 vec.width = STREAM_WIDTH;
752 vec.height = STREAM_HEIGHT;
753 vec.picture = &picture;
755 vec.luma_only = FALSE;
757 vec.luma_only = TRUE;
758 #endif // USE_VIDEO_YUV
759 vec.block_mode_enable = TRUE;
/* Unconditional override of the #ifdef choice above — kept as-is. */
761 vec.luma_only = FALSE;
762 yuv2rgbconf.rgb_format = VP_STAGES_RGB_FORMAT_RGB24;
763 if( CAMIF_H_CAMERA_USED == CAMIF_CAMERA_OVTRULY_UPSIDE_DOWN_ONE_BLOCKLINE_LESS )
764 yuv2rgbconf.mode = VP_STAGES_YUV2RGB_MODE_UPSIDE_DOWN;
/* GTK stage reads the decoded frame size through last_decoded_frame_info. */
765 gtkconf.max_width = picture.width;
766 gtkconf.max_height = picture.height;
767 gtkconf.rowstride = picture.width * 3;
768 gtkconf.last_decoded_frame_info = (void*)&vec;
/* Assemble the pipeline stage by stage. */
774 pipeline.nb_stages = 0;
776 stages[pipeline.nb_stages].type = VP_API_INPUT_SOCKET;
777 stages[pipeline.nb_stages].cfg = (void *)&icc;
778 stages[pipeline.nb_stages].funcs = video_com_funcs;
780 pipeline.nb_stages++;
782 stages[pipeline.nb_stages].type = VP_API_FILTER_DECODER;
783 stages[pipeline.nb_stages].cfg = (void*)&vec;
784 stages[pipeline.nb_stages].funcs = vlib_decoding_funcs;
786 pipeline.nb_stages++;
789 //#warning Recording video option enabled in Navigation.
790 stages[pipeline.nb_stages].type = VP_API_FILTER_DECODER;
791 stages[pipeline.nb_stages].cfg = (void*)&vrc;
792 stages[pipeline.nb_stages].funcs = video_recorder_funcs;
794 pipeline.nb_stages++;
796 #ifdef USE_FFMPEG_RECORDER
797 //#warning FFMPEG Recording video option enabled in Navigation.
798 stages[pipeline.nb_stages].type = VP_API_FILTER_DECODER;
799 stages[pipeline.nb_stages].cfg = (void*)&vrc_ffmpeg;
800 stages[pipeline.nb_stages].funcs = video_ffmpeg_recorder_funcs;
802 pipeline.nb_stages++;
804 #endif // RECORD_VIDEO
/* Tracker overlay reads frame geometry from the vlib decoder config. */
808 draw_trackers_cfg.last_decoded_frame_info = &vec;
809 stages[pipeline.nb_stages].type = VP_API_FILTER_DECODER;
810 stages[pipeline.nb_stages].cfg = (void*)&draw_trackers_cfg;
811 stages[pipeline.nb_stages].funcs = draw_trackers_funcs;
813 pipeline.nb_stages++;
816 stages[pipeline.nb_stages].type = VP_API_FILTER_YUV2RGB;
817 stages[pipeline.nb_stages].cfg = (void*)&yuv2rgbconf;
818 stages[pipeline.nb_stages].funcs = vp_stages_yuv2rgb_funcs;
820 pipeline.nb_stages++;
822 stages[pipeline.nb_stages].type = VP_API_OUTPUT_SDL;
823 stages[pipeline.nb_stages].cfg = (void*)&gtkconf;
824 stages[pipeline.nb_stages].funcs = vp_stages_output_gtk_funcs;
826 pipeline.nb_stages++;
828 pipeline.stages = &stages[0];
830 // Wait for ihm image window to be visible
831 while( !ardrone_tool_exit() && image_vision_window_view != WINDOW_VISIBLE ) {
835 if( !ardrone_tool_exit() )
837 PRINT("\n IHM stage vision thread initialisation\n\n");
839 res = vp_api_open(&pipeline, &pipeline_handle);
844 out.status = VP_API_STATUS_PROCESSING;
/* Main loop: run the pipeline until the tool exits or a stage fails. */
846 while( !ardrone_tool_exit() && (loop == SUCCESS) )
848 #ifdef ND_WRITE_TO_FILE
849 num_picture_decoded = vec.num_picture_decoded;
851 if( image_vision_window_view == WINDOW_VISIBLE ) {
852 if( SUCCEED(vp_api_run(&pipeline, &out)) ) {
853 if( (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING) ) {
857 else loop = -1; // Finish this thread
859 // vp_os_delay( 25 );
862 vp_api_close(&pipeline, &pipeline_handle);
866 PRINT(" IHM stage vision thread ended\n\n");
868 return (THREAD_RET)0;
870 #endif // RAW_CAPTURE