Category: Embedded

2016-05-24 00:53:45

        A note up front: this is my graduation project. I borrowed from the code of many experts online, and without them it could not have been finished, so thank you. I am open-sourcing all of the code in the project here, in the hope that it helps anyone who needs it.

     

        The system runs Linux on an ARM processor. The main program is built as a Qt project, and OpenCV can be used for image processing.
       Images are captured from two cameras and displayed on the LCD, and three on-screen buttons provide three functions: quit the program, save the images, and show grayscale. I will skip the details, since they are easy to find online, and go straight to the code.

This is widget.h:


#ifndef WIDGET_H
#define WIDGET_H

#include <QWidget>
#include <QtGui>
#include <QDateTime>
#include <cv.h>
#include <highgui.h>
#include <cxcore.h>
#include "camthread.h"
#include "globaldata.h"

namespace Ui {
class Widget;
}

class Widget : public QWidget
{
    Q_OBJECT

public:
    explicit Widget(QWidget *parent = 0);
    ~Widget();

private slots:
    void slotShowImage(int num, unsigned char* rgb); // receives one RGB frame from a camera thread
    void slotGrayImage();                            // toggles grayscale display
    void slotSaveImage();                            // asks for the next frame of each camera to be saved

private:
    Ui::Widget *ui;

    volatile bool m_saveFlag_L; // set by the buttons, checked in slotShowImage()
    volatile bool m_saveFlag_R;
    volatile bool m_grayFlag_L;
    volatile bool m_grayFlag_R;

    CamThread* m_cam_L; // capture thread for the left camera
    CamThread* m_cam_R; // capture thread for the right camera
};

#endif // WIDGET_H
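
Both widget.h and camthread.h pull in globaldata.h, which is not posted here. As a minimal sketch, it only needs to define the image size and the camera identifiers used above; the concrete values below are assumptions for illustration, not necessarily the ones used on the actual board:

// globaldata.h -- hypothetical reconstruction; adjust the values to your hardware
#ifndef GLOBALDATA_H
#define GLOBALDATA_H

#define IMG_WIDTH   320   // assumed capture width in pixels
#define IMG_HEIGHT  240   // assumed capture height in pixels

#define LEFT_CAM    0     // identifiers passed to CamThread and carried by sigGotImage()
#define RIGHT_CAM   1

#endif // GLOBALDATA_H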

This is widget.cpp:


#include "widget.h"
#include "ui_widget.h"

Widget::Widget(QWidget *parent) :
    QWidget(parent),
    ui(new Ui::Widget)
{
    ui->setupUi(this);

    //flag set
    m_saveFlag_L = 0;
    m_saveFlag_R = 0;
    m_grayFlag_L = 0;
    m_grayFlag_R = 0;
    //camera setup
    m_cam_L = new CamThread("/dev/video1", LEFT_CAM);
    if(m_cam_L->initCam() == -1)
        printf("open %s error\n", m_cam_L->m_devName.toStdString().c_str());
    connect(m_cam_L, SIGNAL(sigGotImage(int, unsigned char*)), this, SLOT(slotShowImage(int, unsigned char*)));

    m_cam_R = new CamThread("/dev/video2", RIGHT_CAM);
    if(m_cam_R->initCam() == -1)
        printf("open %s error\n", m_cam_R->m_devName.toStdString().c_str());
    connect(m_cam_R, SIGNAL(sigGotImage(int, unsigned char*)), this, SLOT(slotShowImage(int, unsigned char*)));

    m_cam_L->start();
    m_cam_R->start();
    printf("thread start\n");

    connect(ui->saveButton, SIGNAL(clicked()), this, SLOT(slotSaveImage()));
    connect(ui->grayButton, SIGNAL(clicked()), this, SLOT(slotGrayImage()));
    connect(ui->quitButton, SIGNAL(clicked()), qApp, SLOT(quit()));
}

Widget::~Widget()
{
    delete ui;

    // the CamThread objects were created with new, so delete (not free()) is the matching cleanup
    delete m_cam_L;
    delete m_cam_R;
}

void Widget::slotShowImage(int num, unsigned char* rgb)
{
    ///////////////left////////////////////////
    if(num == LEFT_CAM)
    {
        cv::Mat mat_src_L(IMG_HEIGHT, IMG_WIDTH, CV_8UC3, rgb);
        QImage qimage_L;

        if(m_grayFlag_L == 1)
        { //gray
            cv::Mat mat_gray_L(IMG_WIDTH, IMG_HEIGHT, CV_8UC1);
            cv::cvtColor(mat_src_L, mat_gray_L, CV_BGR2GRAY); // cvtColor resizes the output to match the source

            qimage_L = QImage(mat_gray_L.cols, mat_gray_L.rows, QImage::Format_Indexed8);
            qimage_L.setColorCount(256);
            for(int i = 0; i < 256; i++)
            {
                qimage_L.setColor(i, qRgb(i, i, i)); // grayscale palette
            }
            // copy row by row because QImage scanlines may be padded
            unsigned char* pSrc = mat_gray_L.data;
            for(int row = 0; row < mat_gray_L.rows; row++)
            {
                unsigned char* pDest = qimage_L.scanLine(row);
                memcpy(pDest, pSrc, mat_gray_L.cols);
                pSrc += mat_gray_L.step;
            }
        }
        else
        {
            qimage_L = QImage(mat_src_L.data, mat_src_L.cols, mat_src_L.rows, QImage::Format_RGB888);
        }
        ui->imgLabel_L->setPixmap(QPixmap::fromImage(qimage_L, Qt::AutoColor));

        if(m_saveFlag_L == 1)
        { //save
            QDateTime date_time_L = QDateTime::currentDateTime();
            QString imgName_L = QString("/cam/%1-%2_%3:%4:%5_L.bmp")
                                    .arg(date_time_L.date().month(), 2, 10, QChar('0'))
                                    .arg(date_time_L.date().day(), 2, 10, QChar('0'))
                                    .arg(date_time_L.time().hour(), 2, 10, QChar('0'))
                                    .arg(date_time_L.time().minute(), 2, 10, QChar('0'))
                                    .arg(date_time_L.time().second(), 2, 10, QChar('0'));
            qimage_L.save(imgName_L);
            imgName_L = imgName_L + " saved.";
            ui->stateLabel_L->setText(imgName_L);
            m_saveFlag_L = 0;
        }
    }

    ///////////////right////////////////////////
    if(num == RIGHT_CAM)
    {
        cv::Mat mat_src_R(IMG_HEIGHT, IMG_WIDTH, CV_8UC3, rgb);
        QImage qimage_R;

        if(m_grayFlag_R == 1)
        { //gray
            cv::Mat mat_gray_R(IMG_WIDTH, IMG_HEIGHT, CV_8UC1);
            cv::cvtColor(mat_src_R, mat_gray_R, CV_BGR2GRAY);

            qimage_R = QImage(mat_gray_R.cols, mat_gray_R.rows, QImage::Format_Indexed8);
            qimage_R.setColorCount(256);
            for(int i = 0; i < 256; i++)
            {
                qimage_R.setColor(i, qRgb(i, i, i));
            }
            unsigned char* pSrc = mat_gray_R.data;
            for(int row = 0; row < mat_gray_R.rows; row++)
            {
                unsigned char* pDest = qimage_R.scanLine(row);
                memcpy(pDest, pSrc, mat_gray_R.cols);
                pSrc += mat_gray_R.step;
            }
        }
        else
        {
            qimage_R = QImage(mat_src_R.data, mat_src_R.cols, mat_src_R.rows, QImage::Format_RGB888);
        }
        ui->imgLabel_R->setPixmap(QPixmap::fromImage(qimage_R, Qt::AutoColor));

        if(m_saveFlag_R == 1)
        { //save
            QDateTime date_time_R = QDateTime::currentDateTime();
            QString imgName_R = QString("/cam/%1-%2_%3:%4:%5_R.bmp")
                                    .arg(date_time_R.date().month(), 2, 10, QChar('0'))
                                    .arg(date_time_R.date().day(), 2, 10, QChar('0'))
                                    .arg(date_time_R.time().hour(), 2, 10, QChar('0'))
                                    .arg(date_time_R.time().minute(), 2, 10, QChar('0'))
                                    .arg(date_time_R.time().second(), 2, 10, QChar('0'));
            qimage_R.save(imgName_R);
            imgName_R = imgName_R + " saved.";
            ui->stateLabel_R->setText(imgName_R);
            m_saveFlag_R = 0;
        }
    }
}

void Widget::slotSaveImage()
{
    m_saveFlag_L = 1;
    m_saveFlag_R = 1;
}

void Widget::slotGrayImage()
{
    if(m_grayFlag_L == 0)
    {
        //change state
        ui->garyLabel->setText("gray");
        m_grayFlag_L = 1;
        m_grayFlag_R = 1;
        return;
    }

    if(m_grayFlag_L == 1)
    {
        //change state
        ui->garyLabel->setText("orig");
        m_grayFlag_L = 0;
        m_grayFlag_R = 0;
        return;
    }
}
The code above uses a custom CamThread class, written so that the two cameras can each be driven from their own thread.
        The part worth paying attention to is the connect(m_cam_X, SIGNAL(sigGotImage(int, unsigned char*)), this, SLOT(slotShowImage(int, unsigned char*))) call: this is how the image data produced in the child thread reaches the main thread, and it took a lot of experimenting before it worked.
       Also, when converting the OpenCV image data to grayscale, the height-by-width versus width-by-height order seems to get swapped; thanks to my classmate Peipei for helping me track down that problem.
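
For reference, when a signal crosses threads Qt uses a queued connection by default, and a queued connection can only carry argument types that the meta-object system knows how to copy. A minimal sketch of how a custom pointer type can be registered before the connect() calls is shown below; this is a general Qt technique, not something the posted code does:

#include <QMetaType>

// somewhere before the connect() calls, e.g. at the top of the Widget constructor:
qRegisterMetaType<unsigned char*>("unsigned char*"); // lets a queued connection copy the argument

// the existing connection then delivers the frame pointer into the GUI thread's event loop:
connect(m_cam_L, SIGNAL(sigGotImage(int, unsigned char*)),
        this,    SLOT(slotShowImage(int, unsigned char*)));

Note that only the pointer travels through the event queue, so the buffer it points to (m_rgb in CamThread) must stay valid and may be overwritten by the next frame before the slot runs; the 30 ms sleep in run() is what keeps this workable in practice.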

This is camthread.h:


/* Camera thread class
 * Performs all operations on the camera, including control and capture,
 * and finally sends the data to the main thread.
 */
#ifndef CAMTHREAD_H
#define CAMTHREAD_H

#include <string.h>
#include <stdlib.h>
#include <errno.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <asm/types.h>
#include <linux/videodev2.h>
#include <QString>
#include <QObject>
#include <QThread>
#include <QFile>
#include <QtGui>
#include <stdio.h>
#include "globaldata.h"

#define PIXE_FORMAT V4L2_PIX_FMT_YUYV //V4L2_PIX_FMT_MJPEG
#define CLEAR(x) memset(&(x), 0, sizeof(x))

class CamThread : public QThread
{
    Q_OBJECT

public:
    CamThread(QString devName, int num);
    ~CamThread();

    int initCam(); //initialise the device

private:
    int openDevice();
    int initDevice();
    int startCapturing();
    //int getFrame(void** frameBuf, size_t* len);
    int getFrame();
    int ungetFrame();
    int stopCapturing();
    int uninitDevice();
    int closeDevice();

    int initMmap();

    //MJPEG
    //int is_huffman(unsigned char *buf);

    //YUYV
    int convertYUVtoRGBpixel(int y, int u, int v);
    int convertYUVtoRGBbuffer(unsigned char* yuv, unsigned char* rgb, unsigned int w, unsigned int h);

protected:
    void run();

signals:
    //void sigGotImage(int num, unsigned char* &rgb);
    void sigGotImage(int num, unsigned char* rgb);

public:
    struct camBuffer //describes one mmap'ed frame buffer
    {
        void* start;
        size_t length;
    };

    QString m_devName;       //device node name
    int m_camNum;            //distinguishes the left and right camera
    int m_fd;                //file descriptor
    camBuffer* m_buffers;    //mapped frame buffers
    unsigned int m_nbuffers; //number of buffers actually mapped
    int m_index;             //index of the buffer currently dequeued

    unsigned char* m_yuv;
    unsigned char* m_rgb;

};

#endif // CAMTHREAD_H

This is camthread.cpp:


#include "camthread.h"

CamThread::CamThread(QString devName, int num)
{
    m_devName = devName; //device node name
    m_camNum = num;      //distinguishes the left and right camera
    m_fd = -1;           //file descriptor
    m_buffers = NULL;    //frame buffers
    m_nbuffers = 0;      //number of buffers mapped
    m_index = -1;        //no buffer dequeued yet

    m_yuv = NULL;
    m_rgb = NULL;
    //m_qimage = NULL;
}

CamThread::~CamThread()
{

}


//////////////////////////image acquisition///////////////////////////////////////////////////
int CamThread::initCam()
{
    if(openDevice() == -1)
    {
        printf("open %s error\n", m_devName.toStdString().c_str());
        closeDevice();
        return -1;
    }
    if(initDevice() == -1)
    {
        printf("init %s error\n", m_devName.toStdString().c_str());
        closeDevice();
        return -1;
    }
    if(startCapturing() == -1)
    {
        printf("startCapturing %s error\n", m_devName.toStdString().c_str());
        closeDevice();
        return -1;
    }

    return 0;
}

void CamThread::run()
{
    while(1)
    {
        getFrame();                                         //dequeue one frame into m_yuv
        convertYUVtoRGBbuffer(m_yuv, m_rgb, IMG_WIDTH, IMG_HEIGHT);
        emit sigGotImage(m_camNum, m_rgb);                  //hand the RGB frame to the GUI thread
        ungetFrame();                                       //requeue the buffer
        msleep(30);
    }
}

////////////////////////low-level camera operations/////////////////////////////////////////////////////
int CamThread::openDevice()
{
    m_fd = open(m_devName.toStdString().c_str(), O_RDWR, 0);
    if(m_fd == -1)
    {
        printf("open: %s\n", strerror(errno));
        return -1;
    }

    return 0;
}

int CamThread::initDevice()
{
    //query device capabilities
    v4l2_capability cap;
    if(ioctl(m_fd, VIDIOC_QUERYCAP, &cap) == -1)
    {
        if(EINVAL == errno)
            printf("%s is no v4l2 device\n", m_devName.toStdString().c_str());
        else
            printf("VIDIOC_QUERYCAP: %s\n", strerror(errno));
        return -1;
    }
    if(!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
    {
        printf("%s is not a video capture device\n", m_devName.toStdString().c_str());
        return -1;
    }
    if(!(cap.capabilities & V4L2_CAP_STREAMING))
    {
        printf("%s does not support streaming\n", m_devName.toStdString().c_str());
        return -1;
    }
    if(!(cap.capabilities & V4L2_CAP_READWRITE))
    {
        printf("%s does not support read/write\n", m_devName.toStdString().c_str());
        //return -1;
    }

    //cropping / scaling: reset to the default rectangle
    v4l2_cropcap cropcap;
    v4l2_crop crop;
    CLEAR(cropcap);
    cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if(ioctl(m_fd, VIDIOC_CROPCAP, &cropcap) == 0)
    {
        CLEAR(crop);
        crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        crop.c = cropcap.defrect;
        if(ioctl(m_fd, VIDIOC_S_CROP, &crop) == -1)
        {
            if(EINVAL == errno)
            {
                printf("VIDIOC_S_CROP not supported\n");
            }
            else
            {
                printf("VIDIOC_S_CROP: %s\n", strerror(errno));
                return -1;
            }
        }
    }
    else
    {
        printf("VIDIOC_CROPCAP: %s\n", strerror(errno));
        return -1;
    }

    //enumerate all supported pixel formats
    v4l2_fmtdesc fmtdesc;
    CLEAR(fmtdesc);
    fmtdesc.index = 0;
    fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    printf("support format:\n");
    while(ioctl(m_fd, VIDIOC_ENUM_FMT, &fmtdesc) != -1)
    {
        printf("\t%d.%s\n", fmtdesc.index+1, fmtdesc.description);
        fmtdesc.index++;
    }

    //set capture format
    v4l2_format fmt;
    CLEAR(fmt);
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = IMG_WIDTH;
    fmt.fmt.pix.height = IMG_HEIGHT;
    fmt.fmt.pix.pixelformat = PIXE_FORMAT;
    fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
    if(ioctl(m_fd, VIDIOC_S_FMT, &fmt) == -1)
    {
        printf("VIDIOC_S_FMT: %s\n", strerror(errno));
        return -1;
    }

    //map the driver buffers
    if(initMmap() == -1)
    {
        printf("init mmap error\n");
        return -1;
    }

    //conversion buffers (a YUYV frame is IMG_WIDTH*IMG_HEIGHT*2 bytes, so 3 bytes per pixel is enough)
    m_yuv = (unsigned char*)malloc(IMG_WIDTH*IMG_HEIGHT*3*sizeof(char));
    m_rgb = (unsigned char*)malloc(IMG_WIDTH*IMG_HEIGHT*3*sizeof(char));

    return 0;
}

int CamThread::startCapturing()
{
    //queue the mapped buffers and start streaming
    for(unsigned int i = 0; i < m_nbuffers; ++i)
    {
        v4l2_buffer buf;
        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        if(ioctl(m_fd, VIDIOC_QBUF, &buf) == -1) //put the buffer on the incoming queue
        {
            printf("VIDIOC_QBUF %s\n", strerror(errno));
            return -1;
        }
    }

    v4l2_buf_type type;
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if(ioctl(m_fd, VIDIOC_STREAMON, &type) == -1)
    {
        printf("VIDIOC_STREAMON %s\n", strerror(errno));
        return -1;
    }

    return 0;
}

//int CamThread::getFrame(void** frameBuf, size_t* len)
int CamThread::getFrame()
{
    //dequeue one filled frame and copy it out
    v4l2_buffer queue_buf;
    CLEAR(queue_buf);
    queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    queue_buf.memory = V4L2_MEMORY_MMAP;
    if(ioctl(m_fd, VIDIOC_DQBUF, &queue_buf) == -1)
    {
        switch(errno)
        {
            case EAGAIN:
                return -1;
            case EIO:
                return -1;
            default:
                printf("VIDIOC_DQBUF: %s\n", strerror(errno));
                return -1;
        }
    }
    //*frameBuf = m_buffers[queue_buf.index].start;
    //*len = m_buffers[queue_buf.index].length; //queue_buf.bytesused;
    m_index = queue_buf.index; //remember which buffer to requeue later

    //YUYV
    memcpy(m_yuv, m_buffers[queue_buf.index].start, m_buffers[queue_buf.index].length);

/*
    //MJPEG
    memcpy(m_tmpBuffer, m_buffers[queue_buf.index].start, queue_buf.bytesused);

    m_fileFd = fopen("cam.jpg", "wb");
    unsigned char *ptdeb, *ptcur = m_tmpBuffer;
    int sizein;
    if(!is_huffman(m_tmpBuffer))
    {
        ptdeb = ptcur = m_tmpBuffer;
        while (((ptcur[0] << 8) | ptcur[1]) != 0xffc0)
            ptcur++;
        sizein = ptcur - ptdeb;
        fwrite(m_tmpBuffer, sizein, 1, m_fileFd);
        fwrite(dht_data, DHT_SIZE, 1, m_fileFd);
        fwrite(ptcur, queue_buf.bytesused-sizein, 1, m_fileFd);
    }
    else
    {
        fwrite(ptcur, queue_buf.bytesused, 1, m_fileFd);
    }
    fclose(m_fileFd);
*/

    return 0;
}

int CamThread::ungetFrame()
{
    if(m_index != -1)
    {
        //put the dequeued buffer back onto the queue
        v4l2_buffer queue_buf;
        CLEAR(queue_buf);
        queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        queue_buf.memory = V4L2_MEMORY_MMAP;
        queue_buf.index = m_index;
        if(ioctl(m_fd, VIDIOC_QBUF, &queue_buf) == -1)
        {
            printf("VIDIOC_QBUF: %s\n", strerror(errno));
            return -1;
        }

        return 0;
    }
    printf("m_index = -1\n");
    return -1;
}

int CamThread::stopCapturing()
{
    v4l2_buf_type type;
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if(ioctl(m_fd, VIDIOC_STREAMOFF, &type) == -1)
    {
        printf("VIDIOC_STREAMOFF %s\n", strerror(errno));
        return -1;
    }

    return 0;
}

int CamThread::uninitDevice()
{
    for(unsigned int i = 0; i < m_nbuffers; ++i)
    {
        if(munmap(m_buffers[i].start, m_buffers[i].length))
        {
            printf("munmap %s\n", strerror(errno));
            return -1;
        }
    }
    free(m_buffers);
    free(m_yuv);
    free(m_rgb);

    return 0;
}

int CamThread::closeDevice()
{
    if(close(m_fd) == -1)
    {
        printf("close: %s", strerror(errno));
        return -1;
    }

    return 0;
}

int CamThread::initMmap()
{
    //request frame buffers from the driver
    v4l2_requestbuffers reqbuf;
    CLEAR(reqbuf);
    reqbuf.count = 4;                          //number of frame buffers requested
    reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; //buffer type
    reqbuf.memory = V4L2_MEMORY_MMAP;          //I/O method: memory mapping
    if(ioctl(m_fd, VIDIOC_REQBUFS, &reqbuf) == -1)
    {
        if(EINVAL == errno)
            printf("%s does not support memory mapping\n", m_devName.toStdString().c_str());
        else
            printf("VIDIOC_REQBUFS %s\n", strerror(errno));
        return -1;
    }
    if(reqbuf.count < 2)
    {
        printf("insufficient buffer memory on %s\n", m_devName.toStdString().c_str());
        return -1;
    }

    //map the buffers into this process
    m_buffers = (camBuffer*)calloc(reqbuf.count, sizeof(*m_buffers));
    if(!m_buffers)
    {
        printf("out of memory\n");
        return -1;
    }
    for(m_nbuffers = 0; m_nbuffers < reqbuf.count; ++m_nbuffers)
    {
        v4l2_buffer buf; //one driver frame buffer
        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = m_nbuffers; //buffer index
        //query buffer number m_nbuffers to get its offset and length
        if(ioctl(m_fd, VIDIOC_QUERYBUF, &buf) == -1)
        {
            printf("VIDIOC_QUERYBUF: %s\n", strerror(errno));
            return -1;
        }
        m_buffers[m_nbuffers].length = buf.length;
        //map the driver buffer into user space and record it in m_buffers
        m_buffers[m_nbuffers].start = mmap(NULL,
                                           buf.length,
                                           PROT_READ | PROT_WRITE,
                                           MAP_SHARED,
                                           m_fd,
                                           buf.m.offset);
        if(m_buffers[m_nbuffers].start == MAP_FAILED)
        {
            printf("mmap %s\n", strerror(errno));
            return -1;
        }
    }

    return 0;
}

//////////////////////MJPEG/////////////////////////////////
/*
int CamThread::is_huffman(unsigned char *buf)
{
    unsigned char *ptbuf;
    int i = 0;
    ptbuf = buf;
    while (((ptbuf[0] << 8) | ptbuf[1]) != 0xffda)
    {
        if(i++ > 2048)
            return 0;
        if(((ptbuf[0] << 8) | ptbuf[1]) == 0xffc4)
            return 1;
        ptbuf++;
    }
    return 0;
}
*/

//////////////////////YUYV image conversion////////////////////////////////////////
int CamThread::convertYUVtoRGBpixel(int y, int u, int v)
{
    unsigned int pixel_32 = 0;
    unsigned char *pixel = (unsigned char *)&pixel_32;
    int r, g, b;
    r = y + (1.370705 * (v-128));
    g = y - (0.698001 * (v-128)) - (0.337633 * (u-128));
    b = y + (1.732446 * (u-128));
    if(r > 255)
        r = 255;
    if(g > 255)
        g = 255;
    if(b > 255)
        b = 255;
    if(r < 0)
        r = 0;
    if(g < 0)
        g = 0;
    if(b < 0)
        b = 0;
    //scale slightly below full range and pack as R,G,B in the low three bytes
    pixel[0] = r*220/256;
    pixel[1] = g*220/256;
    pixel[2] = b*220/256;

    return pixel_32;
}

int CamThread::convertYUVtoRGBbuffer(unsigned char* yuv, unsigned char* rgb, unsigned int w, unsigned int h)
{
    unsigned int in, out = 0;
    unsigned int pixel_16;
    unsigned char pixel_24[3];
    unsigned int pixel_32;
    int y0, u, y1, v;

    //every 4 bytes of YUYV (Y0 U Y1 V) produce two RGB pixels
    for(in = 0; in < w*h*2; in+=4)
    {
        pixel_16 = yuv[in+3]<<24 |
                   yuv[in+2]<<16 |
                   yuv[in+1]<<8  |
                   yuv[in+0];
        y0 = (pixel_16 & 0x000000ff);
        u  = (pixel_16 & 0x0000ff00) >> 8;
        y1 = (pixel_16 & 0x00ff0000) >> 16;
        v  = (pixel_16 & 0xff000000) >> 24;

        pixel_32 = convertYUVtoRGBpixel(y0, u, v);
        pixel_24[0] = (pixel_32 & 0x000000ff);
        pixel_24[1] = (pixel_32 & 0x0000ff00) >> 8;
        pixel_24[2] = (pixel_32 & 0x00ff0000) >> 16;
        rgb[out++] = pixel_24[0];
        rgb[out++] = pixel_24[1];
        rgb[out++] = pixel_24[2];

        pixel_32 = convertYUVtoRGBpixel(y1, u, v);
        pixel_24[0] = (pixel_32 & 0x000000ff);
        pixel_24[1] = (pixel_32 & 0x0000ff00) >> 8;
        pixel_24[2] = (pixel_32 & 0x00ff0000) >> 16;
        rgb[out++] = pixel_24[0];
        rgb[out++] = pixel_24[1];
        rgb[out++] = pixel_24[2];
    }

    return 0;
}
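
One thing the class as posted never does is stop the loop in run() or release the V4L2 resources: stopCapturing(), uninitDevice() and closeDevice() are defined but never called. A minimal sketch of how they could be wired into shutdown is shown below; the m_running member and the stop() method are additions for illustration, not part of the original code:

// camthread.h would gain:  volatile bool m_running;  and  void stop();
void CamThread::run()
{
    m_running = true;
    while(m_running)
    {
        getFrame();
        convertYUVtoRGBbuffer(m_yuv, m_rgb, IMG_WIDTH, IMG_HEIGHT);
        emit sigGotImage(m_camNum, m_rgb);
        ungetFrame();
        msleep(30);
    }
    //tear everything down in the reverse order of initCam()
    stopCapturing();
    uninitDevice();
    closeDevice();
}

void CamThread::stop()
{
    m_running = false; //called from the GUI thread; run() exits after the current frame
    wait();            //QThread::wait() blocks until run() has returned
}

Widget::~Widget() could then call m_cam_L->stop() and m_cam_R->stop() before deleting the two objects.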

The CamThread class drives the camera through V4L2. The data obtained from the camera is in YUV format and still has to be converted to RGB. I read papers on the relationship between the YUV and RGB colour spaces; the two can be converted into each other. In the end I based the conversion on code from an expert found online; it has been a long time and I can no longer locate the source, so please forgive the missing attribution. The coefficients in the papers differ slightly from the ones in that code, but the code runs well and the difference is tiny, so I left them unchanged.
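
For reference, the conversion most commonly quoted for full-range (JPEG/BT.601) YUV data is:

R = Y + 1.402    * (V - 128)
G = Y - 0.344136 * (U - 128) - 0.714136 * (V - 128)
B = Y + 1.772    * (U - 128)

The constants used in convertYUVtoRGBpixel() above (1.370705, 0.698001, 0.337633, 1.732446) are close to these, which is consistent with the remark that the small difference in coefficients has no visible effect.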

As for main.cpp, there is really nothing to elaborate on...
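
For completeness, a typical main.cpp for a project like this is only a few lines; the following is a generic Qt 4 style sketch, not the author's actual file:

#include <QtGui/QApplication>
#include "widget.h"

int main(int argc, char *argv[])
{
    QApplication a(argc, argv);
    Widget w;
    w.show(); //on the embedded LCD this could be w.showFullScreen()
    return a.exec();
}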

(A screenshot of the running program was attached here.)


My own abilities are limited, so there are bound to be mistakes; please do not hesitate to point them out.
This post is not a complete explanation; I will add more details later.
