// mardrone/video.cpp
#include "video.h"
#include <QGraphicsView>

DroneVideo::DroneVideo()
{
    droneHost.setAddress("192.168.1.1");   // default address of the AR.Drone's access point
    initialized = false;                   // frame buffer and decoder thread are created on the first paint()
}

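// DroneVideo is the graphics item that shows the live camera feed. The constructor only
// records the drone's address; allocating the frame buffer and starting the decoder thread
// are deferred to the first paint() call, when the display depth is known.
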
VideoThread::VideoThread(DroneVideo *parentp, QHostAddress host, QImage *_image)
{
    image = _image;
    qDebug() << "videoThread::videoThread";
    stopped = false;
    parent = parentp;
    videoSock = new QUdpSocket();
    videoSock->bind(QHostAddress::Any, 5555);   // local port for the drone's video stream
    droneHost = host;
    start();                                    // run() does the rest of the decoder setup
}

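// Note on the transport as used above: the AR.Drone delivers its UVLC-encoded video as UDP
// datagrams to port 5555 on the client, which is why the socket is bound to that port here;
// the stream itself is requested later via sendVideoPort().
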
void DroneVideo::paint(QPainter *painter, const QStyleOptionGraphicsItem *option,
                       QWidget *widget)
{
    if(!initialized) { // The QImage must be initialized here because the display depth is not known earlier
        int depth = painter->device()->depth();
        qDebug() << "depth=" << depth;

        if(depth == 24) {
            image = new QImage(320, 240, QImage::Format_RGB32);
            image->fill(0x555555);
        } else {
            image = new QImage(320, 240, QImage::Format_RGB16);
            image->fill(0x5555);
        }
        // Draw a diagonal cross as a placeholder until the first frame has been decoded
        QPainter p(image);
        p.drawLine(0, 0, image->width(), image->height());
        p.drawLine(image->width(), 0, 0, image->height());
        update(boundingRect());
        videoThread = new VideoThread(this, droneHost, image);
        initialized = true;
    } else {
        painter->drawImage(boundingRect(), *image, image->rect());
    }
}

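// The pixel format is chosen to match the paint device: QImage::Format_RGB32 on 24-bit
// displays, 16-bit RGB565 otherwise. On the first call paint() only sets up this buffer and
// spawns the decoder thread; every later call scales the current QImage into the item's
// bounding rectangle.
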
QRectF DroneVideo::boundingRect() const
{
    return QRectF(0.0, 0.0, size().width(), size().height());
}


void VideoThread::run()
{
#define ACQ_WIDTH     320
#define ACQ_HEIGHT    240
#undef memset   // the SDK headers may wrap memset in a macro; make sure the plain libc call is used
    memset(&controller, 0, sizeof(controller));
    memset(&picture, 0, sizeof(picture));
    pictureWidth  = image->width();
    pictureHeight = image->height();
    int codec_type = UVLC_CODEC;
    qDebug() << "videoThread::run()";
    stateTimer = new QTimer();
    connect(stateTimer, SIGNAL(timeout()), this, SLOT(timer()));
    connect(videoSock, SIGNAL(readyRead()), this, SLOT(videoDataReady()));
    qDebug() << "videoThread::run() 2";
    luma_only = FALSE;
    num_picture_decoded = 0;
    // Picture configuration
    picture.format        = PIX_FMT_YUV420P;
    picture.width         = pictureWidth;
    picture.height        = pictureHeight;
    picture.framerate     = 30;
    picture.y_buf         = (uint8_t*)vp_os_malloc((size_t)pictureWidth*pictureHeight);
    picture.cr_buf        = (uint8_t*)vp_os_malloc(pictureWidth*pictureHeight/4);
    picture.cb_buf        = (uint8_t*)vp_os_malloc(pictureWidth*pictureHeight/4);
    picture.y_line_size   = pictureWidth;
    picture.cb_line_size  = pictureWidth / 2;
    picture.cr_line_size  = pictureWidth / 2;
    picture.y_pad         = 0;
    picture.c_pad         = 0;
    qDebug() << "videoThread::run() 3";
    video_codec_open(&controller, (codec_type_t)UVLC_CODEC);
    //stateTimer->start(1000);
    qDebug() << "videoThread::run() initialized";
    sendVideoPort("AT");          // ask the drone to start streaming
    while(!stopped) {
        exec();                   // run an event loop until stop is requested
    }
}

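// The buffer sizes above follow the YUV420P layout: one full-resolution luma plane
// (320*240 = 76800 bytes) plus Cr and Cb planes subsampled 2x2 in both directions
// (320*240/4 = 19200 bytes each, with a line size of 320/2 = 160 bytes).
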
void VideoThread::timer()
{
    // qDebug() << "thread Timer";
}

void VideoThread::sendVideoPort(QString cmd)
{
    QByteArray dgram = cmd.toLatin1();
    qDebug() << "videoThread::sendCmd= " << cmd << "to " << droneHost;
    videoSock->writeDatagram(dgram.data(), dgram.size(), droneHost, 5555);
}

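// The datagram written to the drone's video port presumably acts only as a wake-up: once the
// drone has received something there it starts streaming frames back to our port 5555, and the
// payload ("AT" as sent from run()) does not appear to be interpreted any further.
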
void VideoThread::videoDataReady()
{
    qint64 l;
    QByteArray videoData;
    QHostAddress host;
    quint16 port;

    videoData.resize(videoSock->pendingDatagramSize());
    l = videoSock->readDatagram(videoData.data(), videoData.size(), &host, &port);
    qDebug() << "videoThread::videoDataReady" << " l=" << l << "from" << host;
    decodeTransform(videoData);
}

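// Each readyRead signal is answered by reading exactly one datagram; the decoder below
// consumes these chunks and only reports a complete frame through got_image once enough
// bloclines have been assembled.
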
void VideoThread::decodeTransform(QByteArray &videoData)
{
    // Hand the raw datagram to the UVLC decoder as its input bit stream
    controller.in_stream.bytes   = (uint32_t*)videoData.data();
    controller.in_stream.used    = videoData.size();
    controller.in_stream.size    = videoData.size();
    controller.in_stream.index   = 0;
    controller.in_stream.length  = 32;
    controller.in_stream.code    = 0;

    bool_t got_image = FALSE;
    //qDebug() <<"VideoThread::decodeTransform";
    video_decode_blockline(&controller, &picture, &got_image);
    //qDebug() <<"VideoThread::decodeTransform 2";
    //video_decode_picture( &controller, &picture, &stream, &got_image );
    if(got_image) {
        qDebug() << "VideoThread::decodeTransform got image" << picture.width << picture.height
                 << image->byteCount() << image->bytesPerLine();
        // we got one picture
        // out->size = 1;
        picture.complete = 1;
        num_picture_decoded++;
        // Convert the decoded YUV420P picture to RGB565 directly into the QImage buffer
        vp_stages_YUV420P_to_RGB565(NULL, &picture, image->bits(), image->bytesPerLine());
        qDebug() << "pic " << num_picture_decoded;
    }
}
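
// Overall data flow: a UDP datagram from the drone triggers videoDataReady(), which feeds
// decodeTransform(); the SDK's UVLC decoder assembles bloclines into a YUV420P picture, the
// picture is converted to RGB565 into the shared QImage, and DroneVideo::paint() draws it.
//
// A minimal usage sketch (hypothetical -- the real scene setup lives elsewhere in mardrone),
// assuming DroneVideo is a QGraphicsWidget-style item:
//
//     DroneVideo *video = new DroneVideo();
//     scene->addItem(video);               // 'scene' is a hypothetical QGraphicsScene*
//     video->setGeometry(0, 0, 320, 240);  // matches the 320x240 decoder output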