/* ARDrone SDK 1.8 (2011-07-26) — Examples/Win32/sdk_demo/Sources/Video/video_stage.c */
1 /********************************************************************
2  *                    COPYRIGHT PARROT 2010
3  ********************************************************************
4  *       PARROT - A.R.Drone SDK Windows Client Example
5  *-----------------------------------------------------------------*/
6 /**
7  * @file video_stage.c 
8  * @brief Video stream reception code
9  *
10  * @author marc-olivier.dzeukou@parrot.com
11  * @date 2007/07/27
12  *
13  * @author Stephane Piskorski <stephane.piskorski.ext@parrot.fr>
14  * @date   Sept, 8. 2010
15  *
16  *******************************************************************/
17
18
19 #include <stdio.h>
20 #include <stdlib.h>
21 #include <ctype.h>
22
23
24 #include <time.h>
25
26 /* A.R.Drone OS dependant includes */
27         #include <config.h>
28         #include <VP_Os/vp_os_print.h>
29         #include <VP_Os/vp_os_malloc.h>
30         #include <VP_Os/vp_os_delay.h>
31
32 /* A.R.Drone Video API includes */
33         #include <VP_Api/vp_api.h>
34         #include <VP_Api/vp_api_error.h>
35         #include <VP_Api/vp_api_stage.h>
36         #include <VP_Api/vp_api_picture.h>
37         #include <VP_Stages/vp_stages_io_file.h>
38         #include <VP_Stages/vp_stages_i_camif.h>
39         #include <VLIB/Stages/vlib_stage_decode.h>
40         #include <VP_Stages/vp_stages_yuv2rgb.h>
41         #include <VP_Stages/vp_stages_buffer_to_picture.h>
42
43 /* A.R.Drone Tool includes */
44         #include <ardrone_tool/ardrone_tool.h>
45         #include <ardrone_tool/Com/config_com.h>
46         #include <ardrone_tool/UI/ardrone_input.h>
47         #include <ardrone_tool/Video/video_com_stage.h>
48
49 /* Configuration file */
50         #include <win32_custom.h>
51
52 /* Our local pipeline */
53         #include "Video/video_stage.h"
54         
55 #include <UI/directx_rendering.h>
56
57 /* Global variables to build our video pipeline*/
58         #define NB_STAGES 10
59         PIPELINE_HANDLE pipeline_handle;
60         static uint8_t*  pixbuf_data       = NULL;
61         static vp_os_mutex_t  video_update_lock;
62
63
64
65
66
67
68
69
70 /*****************************************************************************/
71 /*
72 \brief Initialization of the video rendering stage.
73 */
74 C_RESULT output_rendering_device_stage_open( void *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
75 {
76         vp_os_mutex_init(&video_update_lock);
77         return (VP_SUCCESS);
78 }
79
80
81
82 extern uint8_t * FrameBuffer;
83
84
85
86 /*****************************************************************************/
87 /*
88 \brief Video rendering function (called for each received frame from the drone).
89 */
90
91  
92 C_RESULT output_rendering_device_stage_transform( void *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
93 {
94   vlib_stage_decoding_config_t* vec = (vlib_stage_decoding_config_t*)cfg;
95
96   vp_os_mutex_lock(&video_update_lock);
97  
98   /* Get a reference to the last decoded picture */
99   pixbuf_data      = (uint8_t*)in->buffers[0];
100   
101                 
102   
103   
104                         /** ======= INSERT USER CODE HERE ========== **/
105                 
106                                 // Send the decoded video frame to the DirectX renderer.
107                                 // This is an example; do here whatever you want to do
108                                 //  with the decoded frame.
109   
110                                 /* Send the actual video resolution to the rendering module */
111                                 D3DChangeTextureSize(vec->controller.width,vec->controller.height);
112                                 /* Send video picture to the rendering module */
113                                 D3DChangeTexture(pixbuf_data);
114
115                         /** ======= INSERT USER CODE HERE ========== **/
116                 
117
118
119   
120   
121   vp_os_mutex_unlock(&video_update_lock);
122   return (VP_SUCCESS);
123 }
124
125
126
127
/*****************************************************************************/
/*
\brief Closing of the video rendering stage (called once at pipeline shutdown).
       No per-stage resource is released here; the stage holds nothing
       besides the statically-allocated mutex.
*/
C_RESULT output_rendering_device_stage_close( void *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
{
  return (VP_SUCCESS);
}
136
137
/*****************************************************************************/
/*
	List of the functions that define the rendering stage.
	Order follows the vp_api_stage_funcs_t layout: the first (unused here)
	callback, then open / transform / close.
*/
const vp_api_stage_funcs_t vp_stages_output_rendering_device_funcs =
{
  NULL,                                                        /* first callback not used by this stage */
  (vp_api_stage_open_t)output_rendering_device_stage_open,     /* called once at pipeline creation */
  (vp_api_stage_transform_t)output_rendering_device_stage_transform, /* called for every decoded frame */
  (vp_api_stage_close_t)output_rendering_device_stage_close    /* called once at pipeline shutdown */
};
149
150
151
152
153 /*****************************************************************************/
154 /*
155         The video processing thread.
156         This function can be kept as it is by most users.
157         It automatically receives the video stream in a loop, decode it, and then 
158                 call the 'output_rendering_device_stage_transform' function for each decoded frame.
159 */
160 DEFINE_THREAD_ROUTINE(video_stage, data)
161 {
162   C_RESULT res;
163
164   vp_api_io_pipeline_t    pipeline;
165   vp_api_io_data_t        out;
166   vp_api_io_stage_t       stages[NB_STAGES];
167
168   vp_api_picture_t picture;
169
170   video_com_config_t              icc;
171   vlib_stage_decoding_config_t    vec;
172   vp_stages_yuv2rgb_config_t      yuv2rgbconf;
173   
174
175   /* Picture configuration */
176           picture.format        = PIX_FMT_YUV420P;
177
178           picture.width         = DRONE_VIDEO_MAX_WIDTH;
179           picture.height        = DRONE_VIDEO_MAX_HEIGHT;
180           picture.framerate     = 15;
181
182           picture.y_buf   = vp_os_malloc( DRONE_VIDEO_MAX_WIDTH * DRONE_VIDEO_MAX_HEIGHT     );
183           picture.cr_buf  = vp_os_malloc( DRONE_VIDEO_MAX_WIDTH * DRONE_VIDEO_MAX_HEIGHT / 4 );
184           picture.cb_buf  = vp_os_malloc( DRONE_VIDEO_MAX_WIDTH * DRONE_VIDEO_MAX_HEIGHT / 4 );
185
186           picture.y_line_size   = DRONE_VIDEO_MAX_WIDTH;
187           picture.cb_line_size  = DRONE_VIDEO_MAX_WIDTH / 2;
188           picture.cr_line_size  = DRONE_VIDEO_MAX_WIDTH / 2;
189
190           vp_os_memset(&icc,          0, sizeof( icc ));
191           vp_os_memset(&vec,          0, sizeof( vec ));
192           vp_os_memset(&yuv2rgbconf,  0, sizeof( yuv2rgbconf ));
193
194    /* Video socket configuration */
195           icc.com                 = COM_VIDEO();
196           icc.buffer_size         = 100000;
197           icc.protocol            = VP_COM_UDP;
198   
199           COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);
200
201   /* Video decoder configuration */
202           /* Size of the buffers used for decoding 
203          This must be set to the maximum possible video resolution used by the drone 
204          The actual video resolution will be stored by the decoder in vec.controller 
205                  (see vlib_stage_decode.h) */
206           vec.width               = DRONE_VIDEO_MAX_WIDTH;
207           vec.height              = DRONE_VIDEO_MAX_HEIGHT;
208           vec.picture             = &picture;
209           vec.block_mode_enable   = TRUE;
210           vec.luma_only           = FALSE;
211
212   yuv2rgbconf.rgb_format = VP_STAGES_RGB_FORMAT_RGB24;
213
214    /* Video pipeline building */
215   
216                  pipeline.nb_stages = 0;
217
218                 /* Video stream reception */
219                 stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
220                 stages[pipeline.nb_stages].cfg     = (void *)&icc;
221                 stages[pipeline.nb_stages].funcs   = video_com_funcs;
222
223                 pipeline.nb_stages++;
224
225                 /* Video stream decoding */
226                 stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
227                 stages[pipeline.nb_stages].cfg     = (void*)&vec;
228                 stages[pipeline.nb_stages].funcs   = vlib_decoding_funcs;
229
230                 pipeline.nb_stages++;
231
232                 /* YUV to RGB conversion 
233                 YUV format is used by the video stream protocol
234                 Remove this stage if your rendering device can handle 
235                 YUV data directly
236                 */
237                 stages[pipeline.nb_stages].type    = VP_API_FILTER_YUV2RGB;
238                 stages[pipeline.nb_stages].cfg     = (void*)&yuv2rgbconf;
239                 stages[pipeline.nb_stages].funcs   = vp_stages_yuv2rgb_funcs;
240
241                 pipeline.nb_stages++;
242
243                 /* User code */  
244                 stages[pipeline.nb_stages].type    = VP_API_OUTPUT_SDL;  /* Set to VP_API_OUTPUT_SDL even if SDL is not used */
245                 stages[pipeline.nb_stages].cfg     = (void*)&vec;   /* give the decoder information to the renderer */
246                 stages[pipeline.nb_stages].funcs   = vp_stages_output_rendering_device_funcs;
247
248                   pipeline.nb_stages++;
249                   pipeline.stages = &stages[0];
250  
251  
252                   /* Processing of a pipeline */
253                           if( !ardrone_tool_exit() )
254                           {
255                                 PRINT("\n   Video stage thread initialisation\n\n");
256
257                                 res = vp_api_open(&pipeline, &pipeline_handle);
258
259                                 if( VP_SUCCEEDED(res) )
260                                 {
261                                   int loop = VP_SUCCESS;
262                                   out.status = VP_API_STATUS_PROCESSING;
263
264                                   while( !ardrone_tool_exit() && (loop == VP_SUCCESS) )
265                                   {
266                                           if( VP_SUCCEEDED(vp_api_run(&pipeline, &out)) ) {
267                                                 if( (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING) ) {
268                                                   loop = VP_SUCCESS;
269                                                 }
270                                           }
271                                           else loop = -1; // Finish this thread
272                                   }
273
274                                   vp_api_close(&pipeline, &pipeline_handle);
275                                 }
276                         }
277
278   PRINT("   Video stage thread ended\n\n");
279
280   return (THREAD_RET)0;
281 }
282