/********************************************************************
 * COPYRIGHT PARROT 2010
 ********************************************************************
 * PARROT - A.R.Drone SDK Windows Client Example
 *------------------------------------------------------------------
 * @brief Video stream reception code
 * @author marc-olivier.dzeukou@parrot.com
 * @author Stephane Piskorski <stephane.piskorski.ext@parrot.fr>
 *******************************************************************/
26 /* A.R.Drone OS dependant includes */
28 #include <VP_Os/vp_os_print.h>
29 #include <VP_Os/vp_os_malloc.h>
30 #include <VP_Os/vp_os_delay.h>
32 /* A.R.Drone Video API includes */
33 #include <VP_Api/vp_api.h>
34 #include <VP_Api/vp_api_error.h>
35 #include <VP_Api/vp_api_stage.h>
36 #include <VP_Api/vp_api_picture.h>
37 #include <VP_Stages/vp_stages_io_file.h>
38 #include <VP_Stages/vp_stages_i_camif.h>
39 #include <VLIB/Stages/vlib_stage_decode.h>
40 #include <VP_Stages/vp_stages_yuv2rgb.h>
41 #include <VP_Stages/vp_stages_buffer_to_picture.h>
43 /* A.R.Drone Tool includes */
44 #include <ardrone_tool/ardrone_tool.h>
45 #include <ardrone_tool/Com/config_com.h>
46 #include <ardrone_tool/UI/ardrone_input.h>
47 #include <ardrone_tool/Video/video_com_stage.h>
49 /* Configuration file */
50 #include <win32_custom.h>
52 /* Our local pipeline */
53 #include "Video/video_stage.h"
55 #include <UI/directx_rendering.h>
57 /* Global variables to build our video pipeline*/
59 PIPELINE_HANDLE pipeline_handle;
60 static uint8_t* pixbuf_data = NULL;
61 static vp_os_mutex_t video_update_lock;
70 /*****************************************************************************/
72 \brief Initialization of the video rendering stage.
74 C_RESULT output_rendering_device_stage_open( void *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
76 vp_os_mutex_init(&video_update_lock);
82 extern uint8_t * FrameBuffer;
86 /*****************************************************************************/
88 \brief Video rendering function (called for each received frame from the drone).
92 C_RESULT output_rendering_device_stage_transform( void *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
94 vlib_stage_decoding_config_t* vec = (vlib_stage_decoding_config_t*)cfg;
96 vp_os_mutex_lock(&video_update_lock);
98 /* Get a reference to the last decoded picture */
99 pixbuf_data = (uint8_t*)in->buffers[0];
104 /** ======= INSERT USER CODE HERE ========== **/
106 // Send the decoded video frame to the DirectX renderer.
107 // This is an example; do here whatever you want to do
108 // with the decoded frame.
110 /* Send the actual video resolution to the rendering module */
111 D3DChangeTextureSize(vec->controller.width,vec->controller.height);
112 /* Send video picture to the rendering module */
113 D3DChangeTexture(pixbuf_data);
115 /** ======= INSERT USER CODE HERE ========== **/
121 vp_os_mutex_unlock(&video_update_lock);
128 /*****************************************************************************/
130 \brief Video rendering function (called for each received frame from the drone).
132 C_RESULT output_rendering_device_stage_close( void *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
138 /*****************************************************************************/
140 List of the functions that define the rendering stage.
142 const vp_api_stage_funcs_t vp_stages_output_rendering_device_funcs =
145 (vp_api_stage_open_t)output_rendering_device_stage_open,
146 (vp_api_stage_transform_t)output_rendering_device_stage_transform,
147 (vp_api_stage_close_t)output_rendering_device_stage_close
153 /*****************************************************************************/
155 The video processing thread.
156 This function can be kept as it is by most users.
157 It automatically receives the video stream in a loop, decode it, and then
158 call the 'output_rendering_device_stage_transform' function for each decoded frame.
160 DEFINE_THREAD_ROUTINE(video_stage, data)
164 vp_api_io_pipeline_t pipeline;
165 vp_api_io_data_t out;
166 vp_api_io_stage_t stages[NB_STAGES];
168 vp_api_picture_t picture;
170 video_com_config_t icc;
171 vlib_stage_decoding_config_t vec;
172 vp_stages_yuv2rgb_config_t yuv2rgbconf;
175 /* Picture configuration */
176 picture.format = PIX_FMT_YUV420P;
178 picture.width = DRONE_VIDEO_MAX_WIDTH;
179 picture.height = DRONE_VIDEO_MAX_HEIGHT;
180 picture.framerate = 15;
182 picture.y_buf = vp_os_malloc( DRONE_VIDEO_MAX_WIDTH * DRONE_VIDEO_MAX_HEIGHT );
183 picture.cr_buf = vp_os_malloc( DRONE_VIDEO_MAX_WIDTH * DRONE_VIDEO_MAX_HEIGHT / 4 );
184 picture.cb_buf = vp_os_malloc( DRONE_VIDEO_MAX_WIDTH * DRONE_VIDEO_MAX_HEIGHT / 4 );
186 picture.y_line_size = DRONE_VIDEO_MAX_WIDTH;
187 picture.cb_line_size = DRONE_VIDEO_MAX_WIDTH / 2;
188 picture.cr_line_size = DRONE_VIDEO_MAX_WIDTH / 2;
190 vp_os_memset(&icc, 0, sizeof( icc ));
191 vp_os_memset(&vec, 0, sizeof( vec ));
192 vp_os_memset(&yuv2rgbconf, 0, sizeof( yuv2rgbconf ));
194 /* Video socket configuration */
195 icc.com = COM_VIDEO();
196 icc.buffer_size = 100000;
197 icc.protocol = VP_COM_UDP;
199 COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);
201 /* Video decoder configuration */
202 /* Size of the buffers used for decoding
203 This must be set to the maximum possible video resolution used by the drone
204 The actual video resolution will be stored by the decoder in vec.controller
205 (see vlib_stage_decode.h) */
206 vec.width = DRONE_VIDEO_MAX_WIDTH;
207 vec.height = DRONE_VIDEO_MAX_HEIGHT;
208 vec.picture = &picture;
209 vec.block_mode_enable = TRUE;
210 vec.luma_only = FALSE;
212 yuv2rgbconf.rgb_format = VP_STAGES_RGB_FORMAT_RGB24;
214 /* Video pipeline building */
216 pipeline.nb_stages = 0;
218 /* Video stream reception */
219 stages[pipeline.nb_stages].type = VP_API_INPUT_SOCKET;
220 stages[pipeline.nb_stages].cfg = (void *)&icc;
221 stages[pipeline.nb_stages].funcs = video_com_funcs;
223 pipeline.nb_stages++;
225 /* Video stream decoding */
226 stages[pipeline.nb_stages].type = VP_API_FILTER_DECODER;
227 stages[pipeline.nb_stages].cfg = (void*)&vec;
228 stages[pipeline.nb_stages].funcs = vlib_decoding_funcs;
230 pipeline.nb_stages++;
232 /* YUV to RGB conversion
233 YUV format is used by the video stream protocol
234 Remove this stage if your rendering device can handle
237 stages[pipeline.nb_stages].type = VP_API_FILTER_YUV2RGB;
238 stages[pipeline.nb_stages].cfg = (void*)&yuv2rgbconf;
239 stages[pipeline.nb_stages].funcs = vp_stages_yuv2rgb_funcs;
241 pipeline.nb_stages++;
244 stages[pipeline.nb_stages].type = VP_API_OUTPUT_SDL; /* Set to VP_API_OUTPUT_SDL even if SDL is not used */
245 stages[pipeline.nb_stages].cfg = (void*)&vec; /* give the decoder information to the renderer */
246 stages[pipeline.nb_stages].funcs = vp_stages_output_rendering_device_funcs;
248 pipeline.nb_stages++;
249 pipeline.stages = &stages[0];
252 /* Processing of a pipeline */
253 if( !ardrone_tool_exit() )
255 PRINT("\n Video stage thread initialisation\n\n");
257 res = vp_api_open(&pipeline, &pipeline_handle);
259 if( VP_SUCCEEDED(res) )
261 int loop = VP_SUCCESS;
262 out.status = VP_API_STATUS_PROCESSING;
264 while( !ardrone_tool_exit() && (loop == VP_SUCCESS) )
266 if( VP_SUCCEEDED(vp_api_run(&pipeline, &out)) ) {
267 if( (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING) ) {
271 else loop = -1; // Finish this thread
274 vp_api_close(&pipeline, &pipeline_handle);
278 PRINT(" Video stage thread ended\n\n");
280 return (THREAD_RET)0;