Chinaunix首页 | 论坛 | 博客
  • 博客访问: 702342
  • 博文数量: 152
  • 博客积分: 0
  • 博客等级: 民兵
  • 技术积分: 1793
  • 用 户 组: 普通用户
  • 注册时间: 2013-09-12 12:26
个人简介

相信自己,只有不想做的,没有做不到的。

文章分类

全部博文(152)

文章存档

2021年(1)

2015年(2)

2014年(74)

2013年(75)

分类: LINUX

2014-01-18 11:43:28

开发环境:ubuntu12.04
开发板:全志A20+蜂鸟开发系统
软件需求:qt4.8.5,opencv2.4.2

1,首先该款摄像头支持linux。
2,配置内核,
    (1)在arm linux的kernel目录下make menuconfig。
    (2)首先(*)选择Multimedia device->下的Video for linux。加载video4linux模块,为视频采集设备提供编程接口;
    (3)然后在usb support->目录下(*)选择support for usb和usb camera ov511 support(适合自己所使用摄像头芯片型号,在此使用ov511)。这使得在内核中加入了对采用OV511接口芯片    的USB数字摄像头的驱动支持。
    (4)保存配置退出。
    (5)make zImage 烧写内核。
    (6)在/dev目录下有video0就说明摄像头驱动已加载
3,应用程序实现
在这里我们使用的是qt作为图形界面的框架来显示摄像头采集的图像。(使用qt的函数首先要安装qt的arm库,这个需要源码编译)
创建一个qt项目,添加video.h video.cpp 
实现video.h代码如下
#ifndef VIDEODEVICE_H
#define VIDEODEVICE_H

#include <QString>
#include <QObject>
#include <string.h>
#include <stdlib.h>
#include <errno.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/videodev2.h>


#define CLEAR(x) memset(&(x), 0, sizeof(x))


// Thin QObject wrapper around a V4L2 capture device using memory-mapped
// streaming I/O.  Expected call sequence:
//   open_device() -> init_device() -> start_capturing()
//   -> { get_frame() / unget_frame() } loop
//   -> stop_capturing() -> uninit_device() -> close_device()
// Every method returns 0 on success and -1 on failure, after emitting
// display_error() with a human-readable message.
class VideoDevice : public QObject
{
    Q_OBJECT
public:
    VideoDevice(QString dev_name);      // bind to a device node path, e.g. "/dev/video0"
    //VideoDevice();
    int open_device();                  // open the device node (blocking read/write)
    int close_device();                 // close the file descriptor
    int init_device();                  // query caps, set crop + 320x240 YUYV format, map buffers
    int start_capturing();              // queue all buffers and issue VIDIOC_STREAMON
    int stop_capturing();               // issue VIDIOC_STREAMOFF
    int uninit_device();                // munmap and free the buffers
    int get_frame(void **, size_t*);    // dequeue a filled frame: outputs data pointer and length
    int unget_frame();                  // requeue the buffer from the last get_frame()
private:
    int init_mmap();                    // VIDIOC_REQBUFS + mmap of the driver buffers
    struct buffer
    {
        void * start;                   // mmap'ed start address of one driver buffer
        size_t length;                  // length of that buffer in bytes
    };
    QString dev_name;                   // device node path
    int fd;                             // open file descriptor, -1 when closed
    buffer* buffers;                    // array of n_buffers mapped buffers
    unsigned int n_buffers;             // number of valid entries in buffers
    int index;                          // index of the last dequeued buffer, -1 if none

signals:
    void display_error(QString);        // emitted with a description of any failure
};

#endif // VIDEODEVICE_H

实现video.cpp代码如下
#include "videodevice.h"
#include <QDebug>

// Construct an (as yet unopened) capture device bound to the given
// device node path.  All state starts out "empty": no descriptor, no
// mapped buffers, no outstanding dequeued frame.
VideoDevice::VideoDevice(QString dev_name)
    : dev_name(dev_name)
    , fd(-1)
    , buffers(NULL)
    , n_buffers(0)
    , index(-1)
{
}

int VideoDevice::open_device()
{
    fd = open(dev_name.toStdString().c_str(), O_RDWR/*|O_NONBLOCK*/, 0);
   // fd = open(dev_name.toStdString().c_str(), O_RDWR|O_NONBLOCK, 0);


    if(-1 == fd)
    {
        emit display_error(tr("open: %1").arg(QString(strerror(errno))));
        return -1;
    }
    return 0;
}

int VideoDevice::close_device()
{
    if(-1 == close(fd))
    {
        emit display_error(tr("close: %1").arg(QString(strerror(errno))));
        return -1;
    }
    return 0;
}


int VideoDevice::init_device()
{
    v4l2_capability cap;
    v4l2_cropcap cropcap;
    v4l2_crop crop;
    v4l2_format fmt;

    if(-1 == ioctl(fd, VIDIOC_QUERYCAP, &cap))
    {
        if(EINVAL == errno)
        {
            emit display_error(tr("%1 is no V4l2 device").arg(dev_name));
        }
        else
        {
            emit display_error(tr("VIDIOC_QUERYCAP: %1").arg(QString(strerror(errno))));
        }
        return -1;
    }

    if(!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
    {
        emit display_error(tr("%1 is no video capture device").arg(dev_name));
        return -1;
    }

    if(!(cap.capabilities & V4L2_CAP_STREAMING))
    {
        emit display_error(tr("%1 does not support streaming i/o").arg(dev_name));
        return -1;
    }

    CLEAR(cropcap);

    cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if(0 == ioctl(fd, VIDIOC_CROPCAP, &cropcap))
    {
        CLEAR(crop);
        crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        crop.c = cropcap.defrect;


        if(-1 == ioctl(fd, VIDIOC_S_CROP, &crop))
        {
            if(EINVAL == errno)
            {
//                emit display_error(tr("VIDIOC_S_CROP not supported"));
            }
            else
            {
                emit display_error(tr("VIDIOC_S_CROP: %1").arg(QString(strerror(errno))));
                return -1;
            }
        }
    }
    else
    {
        emit display_error(tr("VIDIOC_CROPCAP: %1").arg(QString(strerror(errno))));
        return -1;
    }

    CLEAR(fmt);

    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = 320;
    fmt.fmt.pix.height = 240;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;


    if(-1 == ioctl(fd, VIDIOC_S_FMT, &fmt))
    {
        emit display_error(tr("VIDIOC_S_FMT").arg(QString(strerror(errno))));
        return -1;
    }

    if(-1 == init_mmap())
    {
        return -1;
    }


    return 0;
}


int VideoDevice::init_mmap()
{
    v4l2_requestbuffers req;
    CLEAR(req);

    req.count = 4;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;

    if(-1 == ioctl(fd, VIDIOC_REQBUFS, &req))
    {
        if(EINVAL == errno)
        {
            emit display_error(tr("%1 does not support memory mapping").arg(dev_name));
            return -1;
        }
        else
        {
            emit display_error(tr("VIDIOC_REQBUFS %1").arg(QString(strerror(errno))));
            return -1;
        }
    }

    if(req.count < 2)
    {
        emit display_error(tr("Insufficient buffer memory on %1").arg(dev_name));
        return -1;
    }

    buffers = (buffer*)calloc(req.count, sizeof(*buffers));


    if(!buffers)
    {
        emit display_error(tr("out of memory"));
        return -1;
    }

    for(n_buffers = 0; n_buffers < req.count; ++n_buffers)
    {
        v4l2_buffer buf;
        CLEAR(buf);

        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = n_buffers;


        if(-1 == ioctl(fd, VIDIOC_QUERYBUF, &buf))
        {
            emit display_error(tr("VIDIOC_QUERYBUF: %1").arg(QString(strerror(errno))));
            return -1;
        }

        buffers[n_buffers].length = buf.length;
        buffers[n_buffers].start =
                mmap(NULL, // start anywhere
                     buf.length,
                     PROT_READ | PROT_WRITE,
                     MAP_SHARED,
                     fd, buf.m.offset);


        if(MAP_FAILED == buffers[n_buffers].start)
        {
            emit display_error(tr("mmap %1").arg(QString(strerror(errno))));
            return -1;
        }
    }
    return 0;

}

int VideoDevice::start_capturing()
{
    unsigned int i;
    for(i = 0; i < n_buffers; ++i)
    {
        v4l2_buffer buf;
        CLEAR(buf);


        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory =V4L2_MEMORY_MMAP;
        buf.index = i;
//        fprintf(stderr, "n_buffers: %d\n", i);


        if(-1 == ioctl(fd, VIDIOC_QBUF, &buf))
        {
            emit display_error(tr("VIDIOC_QBUF: %1").arg(QString(strerror(errno))));
            return -1;
        }
    }


    v4l2_buf_type type;
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;


    if(-1 == ioctl(fd, VIDIOC_STREAMON, &type))
    {
        emit display_error(tr("VIDIOC_STREAMON: %1").arg(QString(strerror(errno))));
        return -1;
    }
    return 0;
}


int VideoDevice::stop_capturing()
{
    v4l2_buf_type type;
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;


    if(-1 == ioctl(fd, VIDIOC_STREAMOFF, &type))
    {
        emit display_error(tr("VIDIOC_STREAMOFF: %1").arg(QString(strerror(errno))));
        return -1;
    }
    return 0;
}


int VideoDevice::uninit_device()
{
    unsigned int i;
    for(i = 0; i < n_buffers; ++i)
    {
        if(-1 == munmap(buffers[i].start, buffers[i].length))
        {
            emit display_error(tr("munmap: %1").arg(QString(strerror(errno))));
            return -1;
        }


    }
    free(buffers);
    return 0;
}


int VideoDevice::get_frame(void **frame_buf, size_t* len)
{
    v4l2_buffer queue_buf;
    CLEAR(queue_buf);


    queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    queue_buf.memory = V4L2_MEMORY_MMAP;


    if(-1 == ioctl(fd, VIDIOC_DQBUF, &queue_buf))
    {
        switch(errno)
        {
        case EAGAIN:
//            perror("dqbuf");
            return -1;
        case EIO:
            return -1 ;
        default:
            emit display_error(tr("VIDIOC_DQBUF: %1").arg(QString(strerror(errno))));
            return -1;
        }
    }


    *frame_buf = buffers[queue_buf.index].start;
    *len = buffers[queue_buf.index].length;
    index = queue_buf.index;


    return 0;


}


int VideoDevice::unget_frame()
{
    if(index != -1)
    {
        v4l2_buffer queue_buf;
        CLEAR(queue_buf);


        queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        queue_buf.memory = V4L2_MEMORY_MMAP;
        queue_buf.index = index;


        if(-1 == ioctl(fd, VIDIOC_QBUF, &queue_buf))
        {
            emit display_error(tr("VIDIOC_QBUF: %1").arg(QString(strerror(errno))));
            return -1;
        }
        return 0;
    }
    return -1;
}

在widget.h的中添加如下参数

private:

VideoDevice *m_camera;  // V4L2 camera wrapper
    uchar *imgBuf;  // raw RGB888 buffer that backs the QImage
    int camReturn;  // last return code from camera calls, used for status checks
    QImage *frame;  // QImage constructed on top of imgBuf
    QTimer *timer;  // periodic display-refresh timer
    IplImage *frameget; // one captured frame (NOTE(review): not referenced in the visible code — confirm)
    int convert_yuv_to_rgb_pixel(int y, int u, int v);
    int convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height);
    IplImage *QImageToIplImage(const QImage * qImage);

在widget.cpp代码如下
宏定义
#define image_width 320 // displayed image width in pixels
#define image_height 240    // displayed image height in pixels
#define image_Format QImage::Format_RGB888 // pixel format used for display
#define cameraDevice "/dev/video0"  // camera device node

#define imgSizeScaleSmall 0.5    // downscale factor applied before processing
#define imgSizeScaleBig 2    // upscale factor applied after processing

在widget构造函数中添加
    // Constructor body fragment: allocate the RGB buffer, wrap it in a
    // QImage, bring the camera up, and start the refresh timer.
    imgBuf = (unsigned char *)malloc(image_width * image_height* 3 * sizeof(char));
    frame = new QImage(imgBuf,image_width,image_height,image_Format);   // QImage view over the RGB buffer
    m_camera = new VideoDevice(tr(cameraDevice));  // camera device wrapper
    connect(m_camera, SIGNAL(display_error(QString)), this,SLOT(display_error(QString)));
    camReturn = m_camera->open_device();    // open the camera
    if(-1==camReturn)
    {
        // NOTE(review): message says /dev/dsp (audio) but the device
        // opened is the video node — confirm and fix the text
        QMessageBox::warning(this,tr("error"),tr("open /dev/dsp error"),QMessageBox::Yes);
        m_camera->close_device();
    }
    camReturn = m_camera->init_device();    // initialize the camera
    if(-1==camReturn)
    {
        QMessageBox::warning(this,tr("error"),tr("init failed"),QMessageBox::Yes);
        m_camera->close_device();
    }
    camReturn = m_camera->start_capturing();    // start streaming capture
    if(-1==camReturn)
    {
        QMessageBox::warning(this,tr("error"),tr("start capture failed"),QMessageBox::Yes);
        m_camera->close_device();
    }

    // NOTE(review): this re-checks the start_capturing() result with a
    // "get frame failed" message — looks like a leftover copy/paste; confirm
    if(-1==camReturn)
    {
        QMessageBox::warning(this,tr("error"),tr("get frame failed"),QMessageBox::Yes);
        m_camera->stop_capturing();
    }

    // Refresh the display at a fixed interval.
    timer = new QTimer(this);
    // NOTE(review): timeout is connected straight to paintEvent();
    // the conventional approach is to connect to update() — confirm intent
    connect(timer,SIGNAL(timeout()),this,SLOT(paintEvent()));   //update will trigger paintEvent
    timer->start(30);
}

void Widget::paintEvent(QPaintEvent *) //定时器没30ms调用一次paintEvent()图像30ms换一帧
{
    uchar * pImgBuf;
    size_t len;
    camReturn = m_camera->get_frame((void **)&pImgBuf,&len); //获得原始图像yuv格式数据和数据的长度
    convert_yuv_to_rgb_buffer(pImgBuf,imgBuf,image_width,image_height);//yuv格式转换成rgb格式
    frame->loadFromData((uchar *)imgBuf,/*len*/image_width * image_height * 3 * sizeof(char));//转换成QImage格式

    IplImage* src = QImageToIplImage(frame);//QImage 转成IplImage

    //进行图像压缩
    double sizeScale = imgSizeScaleSmall;
    CvSize img_cvsize;
    img_cvsize.width = src->width * sizeScale;
    img_cvsize.height = src->height * sizeScale;
    IplImage* dst = cvCreateImage(img_cvsize, src->depth, src->nChannels);
    cvResize(src, dst, CV_INTER_LINEAR);

                //detect_and_draw(dst);  //图像处理

    sizeScale = imgSizeScaleBig;
    img_cvsize.width = dst->width * sizeScale;
    img_cvsize.height = dst->height * sizeScale;
    IplImage* img = cvCreateImage(img_cvsize, dst->depth, dst->nChannels);
    cvResize(dst, img, CV_INTER_LINEAR);

    
 QImage qimage = QImage((uchar *)img->imageData, img->width,img->height, image_Format);
    //IplImage为BGR格式,QImage为RGB格式,所以要交换B和R通道显示才正常
    //可以用OpenCV的cvConcertImage函数交换,也可以用QImage的rgbSwapped函数交换;
    qimage = qimage.rgbSwapped();
    ui->m_imgLabel->setPixmap(QPixmap::fromImage(qimage));
    camReturn = m_camera->unget_frame();
    cvReleaseImage(&img);   //释放图片内存
    cvReleaseImage(&dst);   //释放图片内存
    cvReleaseImage(&src);

}

//以下是  convert_yuv_to_rgb_buffer QImageToIplImage   函数的实现
/*yuv格式转换为rgb格式*/
int Widget::convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height)
{
 unsigned int in, out = 0;
 unsigned int pixel_16;
 unsigned char pixel_24[3];
 unsigned int pixel32;
 int y0, u, y1, v;
 for(in = 0; in < width * height * 2; in += 4) {
  pixel_16 =
   yuv[in + 3] << 24 |
   yuv[in + 2] << 16 |
   yuv[in + 1] <<  8 |
   yuv[in + 0];
  y0 = (pixel_16 & 0x000000ff);
  u  = (pixel_16 & 0x0000ff00) >>  8;
  y1 = (pixel_16 & 0x00ff0000) >> 16;
  v  = (pixel_16 & 0xff000000) >> 24;
  pixel32 = convert_yuv_to_rgb_pixel(y0, u, v);
  pixel_24[0] = (pixel32 & 0x000000ff);
  pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
  pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;
  rgb[out++] = pixel_24[0];
  rgb[out++] = pixel_24[1];
  rgb[out++] = pixel_24[2];
  pixel32 = convert_yuv_to_rgb_pixel(y1, u, v);
  pixel_24[0] = (pixel32 & 0x000000ff);
  pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
  pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;
  rgb[out++] = pixel_24[0];
  rgb[out++] = pixel_24[1];
  rgb[out++] = pixel_24[2];
 }
 return 0;
}


// Convert one YUV sample to a packed RGB value with R in the low byte,
// G in the next and B in the third (matching the little-endian byte
// layout the original produced via pointer aliasing — the callers here
// run on little-endian ARM/x86).  Each channel is clamped to [0,255] and
// then scaled by 220/256, slightly darkening the output as the original
// code did.
int Widget::convert_yuv_to_rgb_pixel(int y, int u, int v)
{
    // Standard YUV -> RGB conversion coefficients.
    int r = y + (1.370705 * (v-128));
    int g = y - (0.698001 * (v-128)) - (0.337633 * (u-128));
    int b = y + (1.732446 * (u-128));

    // Clamp each channel into the displayable range.
    if(r < 0) r = 0; else if(r > 255) r = 255;
    if(g < 0) g = 0; else if(g > 255) g = 255;
    if(b < 0) b = 0; else if(b > 255) b = 255;

    r = r * 220 / 256;
    g = g * 220 / 256;
    b = b * 220 / 256;

    return r | (g << 8) | (b << 16);
}
/*yuv格式转换为rgb格式*/


// QImage -> IplImage: deep-copies the pixels into a new 8-bit 3-channel
// IplImage of the same dimensions.  The caller owns the returned image
// and must release it with cvReleaseImage().
IplImage* Widget::QImageToIplImage(const QImage * qImage)
{
    int width = qImage->width();
    int height = qImage->height();
    CvSize Size;
    Size.height = height;
    Size.width = width;
    IplImage *IplImageBuffer = cvCreateImage(Size, IPL_DEPTH_8U, 3);    // caller must cvReleaseImage()
    // NOTE(review): per-pixel cvSet2D is very slow; acceptable for a
    // 320x240 demo, but row-pointer copies would be much faster.
    for (int y = 0; y < height; ++y)
    {
        for (int x = 0; x < width; ++x)
        {
            QRgb rgb = qImage->pixel(x, y);
            cvSet2D(IplImageBuffer, y, x, CV_RGB(qRed(rgb), qGreen(rgb), qBlue(rgb)));
        }
    }
    return IplImageBuffer;
}

使用交叉编译器编译后,拷贝到arm开发板上,记住还有qt库,和qt的一些环境变量导入,做图像压缩的时候使用到了opencv的函数,所以也要导入opencv的库,这些库都是需要下载源码编译的。运行程序,
这就可以在开发板的lcd屏上看到摄像头的图像了。
阅读(6000) | 评论(0) | 转发(0) |
给主人留下些什么吧!~~