1 /********************************************************************
3 * Parts of the DirectX code are from a tutorial by Microsoft
4 * which can be found in the Microsoft DirectX SDK June 2010.
5 * Copyright (c) Microsoft Corporation. All rights reserved.
8 * The rest of the code is COPYRIGHT PARROT 2010
10 ********************************************************************
11 * PARROT - A.R.Drone SDK Windows Client Example
12 *-----------------------------------------------------------------*
14 * @file directx_rendering.h
15 * @brief Module rendering the drone video stream on a textured Direct3D polygon.
17 * @author Stephane Piskorski <stephane.piskorski.ext@parrot.fr>
20 *******************************************************************/
24 //-----------------------------------------------------------------------------
\r
25 // Display manager for the Win32 SDK Demo application
\r
26 // Based on the Microsoft DirectX SDK tutorials
\r
27 //-----------------------------------------------------------------------------
\r
29 #include <custom_code.h>
\r
30 #include "directx_rendering.h"
\r
33 //-----------------------------------------------------------------------------
\r
35 //-----------------------------------------------------------------------------
\r
// Global Direct3D objects shared by the rendering thread (created in
// InitD3D/InitGeometry, released in Cleanup).
36 LPDIRECT3D9 g_pD3D = NULL; // Used to create the D3DDevice

37 LPDIRECT3DDEVICE9 g_pd3dDevice = NULL; // Our rendering device

38 LPDIRECT3DVERTEXBUFFER9 g_pVB = NULL; // Buffer to hold vertices

39 LPDIRECT3DTEXTURE9 g_pTexture = NULL; // Our texture

// Current dimensions of the incoming video stream; start at the maximum
// and are updated at runtime by D3DChangeTextureSize().
41 static int videoWidth = DRONE_VIDEO_MAX_WIDTH;

42 static int videoHeight = DRONE_VIDEO_MAX_HEIGHT;
\r
46 //-----------------------------------------------------------------------------
\r
48 // Desc: Initializes Direct3D
\r
49 //-----------------------------------------------------------------------------
\r
50 HRESULT InitD3D( HWND hWnd )
\r
52 // Create the D3D object.
\r
53 if( NULL == ( g_pD3D = Direct3DCreate9( D3D_SDK_VERSION ) ) )
\r
56 // Set up the structure used to create the D3DDevice. Since we are now
\r
57 // using more complex geometry, we will create a device with a zbuffer.
\r
58 D3DPRESENT_PARAMETERS d3dpp;
\r
59 ZeroMemory( &d3dpp, sizeof( d3dpp ) );
\r
60 d3dpp.Windowed = TRUE;
\r
61 d3dpp.SwapEffect = D3DSWAPEFFECT_DISCARD;
\r
62 d3dpp.BackBufferFormat = D3DFMT_UNKNOWN;
\r
63 d3dpp.EnableAutoDepthStencil = TRUE;
\r
64 d3dpp.AutoDepthStencilFormat = D3DFMT_D16;
\r
66 // Create the D3DDevice
\r
67 if( FAILED( g_pD3D->CreateDevice( D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, hWnd,
\r
68 D3DCREATE_SOFTWARE_VERTEXPROCESSING,
\r
69 &d3dpp, &g_pd3dDevice ) ) )
\r
75 g_pd3dDevice->SetRenderState( D3DRS_CULLMODE, D3DCULL_NONE );
\r
77 // Turn off D3D lighting
\r
78 g_pd3dDevice->SetRenderState( D3DRS_LIGHTING, FALSE );
\r
80 // Turn on the zbuffer
\r
81 g_pd3dDevice->SetRenderState( D3DRS_ZENABLE, TRUE );
\r
89 //-----------------------------------------------------------------------------
\r
90 // Name: InitGeometry()
\r
91 // Desc: Create the textures and vertex buffers
\r
92 //-----------------------------------------------------------------------------
\r
93 HRESULT InitGeometry()
\r
95 if( FAILED( D3DXCreateTexture(g_pd3dDevice,
\r
96 TEXTURE_WIDTH,TEXTURE_HEIGHT,
\r
103 MessageBox( NULL, L"Could not create texture for video rendering", L"Bad news...", MB_OK );
\r
107 // Create the vertex buffer.
\r
108 if( FAILED( g_pd3dDevice->CreateVertexBuffer( 4 * 2 * sizeof( CUSTOMVERTEX ),
\r
109 0, D3DFVF_CUSTOMVERTEX,
\r
110 D3DPOOL_DEFAULT, &g_pVB, NULL ) ) )
\r
115 // Fill the vertex buffer. We are setting the tu and tv texture
\r
116 // coordinates, which range from 0.0 to 1.0
\r
117 CUSTOMVERTEX* pVertices;
\r
118 if( FAILED( g_pVB->Lock( 0, 0, ( void** )&pVertices, 0 ) ) )
\r
121 // Create four points coordinates to form a quad
\r
123 pVertices[0].position = D3DXVECTOR3( -1.0f , 1.0 , 0.0f );
\r
124 pVertices[1].position = D3DXVECTOR3( 1.0f , 1.0 , 0.0f );
\r
125 pVertices[2].position = D3DXVECTOR3( -1.0 , -1.0f , 0.0f );
\r
126 pVertices[3].position = D3DXVECTOR3( 1.0f , -1.0f , 0.0f );
\r
128 for(int i=0;i<4;i++) { pVertices[i].color=0xffffffff; }
\r
130 float scaleFactorW = (float)(videoWidth) /(float)(TEXTURE_WIDTH);
\r
131 float scaleFactorH = (float)(videoHeight)/(float)(TEXTURE_HEIGHT);
\r
133 pVertices[0].tu = 0.0f; pVertices[0].tv = 0.0f;
\r
134 pVertices[1].tu = scaleFactorW; pVertices[1].tv = 0.0f;
\r
135 pVertices[2].tu = 0.0f; pVertices[2].tv = scaleFactorH;
\r
136 pVertices[3].tu = scaleFactorW; pVertices[3].tv = scaleFactorH;
\r
// Intermediate frame buffer, 4 bytes per pixel (XRGB): filled row by row by
// D3DChangeTexture() and uploaded into g_pTexture in Render().
144 unsigned char videoFrame[TEXTURE_WIDTH*TEXTURE_HEIGHT*4];
\r
147 //-----------------------------------------------------------------------------
\r
148 // Name: D3DChangeTexture()
\r
149 // Desc: Loads the texture from an external RGB buffer
\r
150 //-----------------------------------------------------------------------------
\r
151 extern "C" void D3DChangeTexture(unsigned char* rgbtexture)
\r
155 unsigned char*rgb_src=rgbtexture;
\r
156 unsigned char*xrgb_dest=videoFrame;
\r
158 for(i=0;i<videoHeight;i++){
\r
159 xrgb_dest = videoFrame+i*TEXTURE_WIDTH*4;
\r
160 rgb_src = rgbtexture+i*DRONE_VIDEO_MAX_WIDTH*3;
\r
161 for (j=0;j<videoWidth;j++){
\r
162 char r = *(rgb_src++);
\r
163 char g = *(rgb_src++);
\r
164 char b = *(rgb_src++);
\r
169 *(xrgb_dest++)=255; /* unused channel */
\r
173 extern "C" void D3DChangeTextureSize(int w,int h)
\r
176 Makes sure the 3D object was built.
\r
177 It might not be, since this function is called by the video pipeline
\r
178 thread which can start before the Direct3D thread.
\r
180 if (g_pVB==NULL) return;
\r
182 if (w!=videoWidth || h!=videoHeight)
\r
184 videoWidth = min(w,DRONE_VIDEO_MAX_WIDTH);
\r
185 videoHeight = min(h,DRONE_VIDEO_MAX_HEIGHT);
\r
188 Change the texture coordinates for the 3D object which renders the video.
\r
189 The texture buffer has a fixed and large size, but only part of it is filled
\r
190 by D3DChangeTexture.
\r
192 CUSTOMVERTEX* pVertices;
\r
193 if( !FAILED( g_pVB->Lock( 0, 0, ( void** )&pVertices, 0 ) ) )
\r
195 float scaleFactorW = (float)(videoWidth) /(float)(TEXTURE_WIDTH);
\r
196 float scaleFactorH = (float)(videoHeight)/(float)(TEXTURE_HEIGHT);
\r
198 pVertices[0].tu = 0.0f; pVertices[0].tv = 0.0f;
\r
199 pVertices[1].tu = scaleFactorW; pVertices[1].tv = 0.0f;
\r
200 pVertices[2].tu = 0.0f; pVertices[2].tv = scaleFactorH;
\r
201 pVertices[3].tu = scaleFactorW; pVertices[3].tv = scaleFactorH;
\r
208 //-----------------------------------------------------------------------------
\r
210 // Desc: Releases all previously initialized objects
\r
211 //-----------------------------------------------------------------------------
\r
214 if( g_pTexture != NULL )
\r
215 g_pTexture->Release();
\r
217 if( g_pVB != NULL )
\r
220 if( g_pd3dDevice != NULL )
\r
221 g_pd3dDevice->Release();
\r
223 if( g_pD3D != NULL )
\r
229 //-----------------------------------------------------------------------------
\r
230 // Name: SetupMatrices()
\r
231 // Desc: Sets up the world, view, and projection transform matrices.
\r
232 //-----------------------------------------------------------------------------
\r
233 VOID SetupMatrices()
\r
235 // Set up world matrix
\r
236 D3DXMATRIXA16 matWorld;
\r
237 D3DXMatrixIdentity( &matWorld );
\r
238 D3DXMatrixRotationX( &matWorld, 0.0f );
\r
239 g_pd3dDevice->SetTransform( D3DTS_WORLD, &matWorld );
\r
241 // Set up our view matrix. A view matrix can be defined given an eye point,
\r
242 // a point to lookat, and a direction for which way is up. Here, we set the
\r
243 // eye five units back along the z-axis and up three units, look at the
\r
244 // origin, and define "up" to be in the y-direction.
\r
245 D3DXVECTOR3 vEyePt( 0.0f, 0.0f,-2.0f );
\r
246 D3DXVECTOR3 vLookatPt( 0.0f, 0.0f, 0.0f );
\r
247 D3DXVECTOR3 vUpVec( 0.0f, 1.0f, 0.0f );
\r
248 D3DXMATRIXA16 matView;
\r
249 D3DXMatrixLookAtLH( &matView, &vEyePt, &vLookatPt, &vUpVec );
\r
250 g_pd3dDevice->SetTransform( D3DTS_VIEW, &matView );
\r
252 // For the projection matrix, we set up a perspective transform (which
\r
253 // transforms geometry from 3D view space to 2D viewport space, with
\r
254 // a perspective divide making objects smaller in the distance). To build
\r
255 // a perpsective transform, we need the field of view (1/4 pi is common),
\r
256 // the aspect ratio, and the near and far clipping planes (which define at
\r
257 // what distances geometry should be no longer be rendered).
\r
258 D3DXMATRIXA16 matProj;
\r
259 D3DXMatrixPerspectiveFovLH( &matProj, D3DX_PI / 4, 1.0f, 1.0f, 100.0f );
\r
260 g_pd3dDevice->SetTransform( D3DTS_PROJECTION, &matProj );
\r
266 //-----------------------------------------------------------------------------
\r
268 // Desc: Draws the scene
\r
269 //-----------------------------------------------------------------------------
\r
272 // Clear the backbuffer and the zbuffer
\r
273 g_pd3dDevice->Clear( 0, NULL, D3DCLEAR_TARGET | D3DCLEAR_ZBUFFER,
\r
274 D3DCOLOR_XRGB( 0, 0, 255 ), 1.0f, 0 );
\r
277 D3DLOCKED_RECT locked;
\r
278 if(g_pTexture->LockRect(0, &locked, NULL, /*D3DLOCK_DISCARD*/0)==D3D_OK)
\r
280 memcpy(locked.pBits, videoFrame, TEXTURE_WIDTH*TEXTURE_HEIGHT*3);
\r
281 g_pTexture->UnlockRect(0);
\r
285 if( SUCCEEDED( g_pd3dDevice->BeginScene() ) )
\r
287 // Setup the world, view, and projection matrices
\r
290 // Setup our texture. Using textures introduces the texture stage states,
\r
291 // which govern how textures get blended together (in the case of multiple
\r
292 // textures) and lighting information. In this case, we are modulating
\r
293 // (blending) our texture with the diffuse color of the vertices.
\r
294 g_pd3dDevice->SetTexture( 0, g_pTexture );
\r
295 g_pd3dDevice->SetTextureStageState( 0, D3DTSS_COLOROP, D3DTOP_MODULATE );
\r
296 g_pd3dDevice->SetTextureStageState( 0, D3DTSS_COLORARG1, D3DTA_TEXTURE );
\r
297 g_pd3dDevice->SetTextureStageState( 0, D3DTSS_COLORARG2, D3DTA_DIFFUSE );
\r
298 g_pd3dDevice->SetTextureStageState( 0, D3DTSS_ALPHAOP, D3DTOP_DISABLE );
\r
300 // Render the vertex buffer contents
\r
301 g_pd3dDevice->SetStreamSource( 0, g_pVB, 0, sizeof( CUSTOMVERTEX ) );
\r
302 g_pd3dDevice->SetFVF( D3DFVF_CUSTOMVERTEX );
\r
303 // Draws two triangles (makes a quad that will support our drone video picture)
\r
304 g_pd3dDevice->DrawPrimitive( D3DPT_TRIANGLESTRIP, 0, 2 );
\r
307 g_pd3dDevice->EndScene();
\r
310 // Present the backbuffer contents to the display
\r
311 g_pd3dDevice->Present( NULL, NULL, NULL, NULL );
\r
317 //-----------------------------------------------------------------------------
\r
319 // Desc: The window's message handler
\r
320 //-----------------------------------------------------------------------------
\r
321 LRESULT WINAPI MsgProc( HWND hWnd, UINT msg, WPARAM wParam, LPARAM lParam )
\r
327 PostQuitMessage( 0 );
\r
331 return DefWindowProc( hWnd, msg, wParam, lParam );
\r
337 //-----------------------------------------------------------------------------
\r
338 //-----------------------------------------------------------------------------
\r
// Main rendering thread: registers a window class, creates the video window,
// initializes Direct3D and the quad geometry, then pumps Windows messages
// (rendering between messages) until WM_QUIT.
// NOTE(review): several lines are missing from this view of the file —
// braces, the "WNDCLASSEX wc = {" initializer opening, the "MSG msg;"
// declaration, the "else { Render(); }" branch of the message loop, and the
// thread epilogue after UnregisterClass — restore from the original SDK source.
339 DEFINE_THREAD_ROUTINE(directx_renderer_thread, data)
341 // Register the window class

344 sizeof( WNDCLASSEX ), CS_CLASSDC, MsgProc, 0L, 0L,

345 GetModuleHandle( NULL ), NULL, NULL, NULL, NULL,

346 L"A.R.Drone Video", NULL

348 RegisterClassEx( &wc );

350 // Create the application's window

351 HWND hWnd = CreateWindow( L"A.R.Drone Video", L"A.R.Drone Video",

352 WS_OVERLAPPEDWINDOW, 100, 100, 640, 480,

353 NULL, NULL, wc.hInstance, NULL );

355 // Initialize Direct3D

356 if( SUCCEEDED( InitD3D( hWnd ) ) )

358 // Create the scene geometry

359 if( SUCCEEDED( InitGeometry() ) )

362 ShowWindow( hWnd, SW_SHOWDEFAULT );

363 UpdateWindow( hWnd );

365 // Enter the message loop

367 ZeroMemory( &msg, sizeof( msg ) );

368 while( msg.message != WM_QUIT )

370 if( PeekMessage( &msg, NULL, 0U, 0U, PM_REMOVE ) )

372 TranslateMessage( &msg );

373 DispatchMessage( &msg );

381 UnregisterClass( L"A.R.Drone Video", wc.hInstance );

383 /* Tells ARDRoneTool to shutdown */
