How to implement a USB camera on Linux
1. Preface
I have been doing embedded Linux development for many years, at least nine by my count. Over that time I have written quite a few programs that read an external USB camera or CMOS camera, capture video in real time, push the images to a front end, or run face analysis on them. My first attempt used QCamera and I gave up on it almost immediately; searching around, the consistent advice was to use the v4l2 video framework, so after a lot of trial and error I got it working and have kept refining it for several years.
With any program, implementing the basic functionality is usually quick and easy; making it solid and adaptable to different scenarios takes far more time. Take loading a camera with v4l2: you have to specify a device file to read from, but in the field you cannot expect the user to provide it, and frequent unplugging and replugging changes the device file name. So you need a mechanism that automatically finds the device file of the camera you want, for example a timer that periodically runs a Linux command to look for it (a sketch of this idea follows this paragraph), and the exact command can even differ slightly between system platforms. If there are several local cameras and you need to tell, say, left from right, the only practical way is to distinguish them by the order in which they are powered on.
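As a concrete illustration of that auto-discovery idea, here is a minimal standalone sketch (assuming Qt 5 for the lambda connection). It enumerates /dev for names beginning with "video" on a timer; the core code later in this article achieves the same thing by running `ls /dev/` through QProcess and filtering its output.

#include <QCoreApplication>
#include <QDir>
#include <QTimer>
#include <QStringList>
#include <QDebug>

int main(int argc, char *argv[])
{
    QCoreApplication app(argc, argv);

    //re-scan /dev every 3 seconds so unplug/replug is picked up
    QTimer timer;
    QObject::connect(&timer, &QTimer::timeout, []() {
        //device nodes are "system" entries, hence the QDir::System filter
        QStringList names = QDir("/dev").entryList(QStringList() << "video*", QDir::System);
        QStringList cameras;
        foreach (const QString &name, names) {
            cameras << QString("/dev/%1").arg(name);
        }
        qDebug() << "candidate camera devices:" << cameras;
    });
    timer.start(3000);

    return app.exec();
}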
Processing flow of the Linux approach:
Call the wrapper function findCamera to look up the camera device file name in real time.
Call ::open to open the device file.
Call the wrapper function initCamera to initialize the camera parameters (pixel format, resolution, etc.).
Call ::select to wait for a frame to become available, then dequeue a buffer frame from the buffer queue (VIDIOC_DQBUF).
The buffer frame data is in YUYV format and must be converted to RGB24 and then to a QImage (see the conversion sketch after this list).
Use the resulting image for drawing, face analysis, and so on.
Close the device file.
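The YUYV-to-RGB24 step mentioned above is not part of the core code excerpt below, so here is a minimal self-contained sketch of one common way to do it. The helper names (yuyvToRgb24, rgb24ToImage) and the integer BT.601-style coefficients are this sketch's own choices, not necessarily what the original project uses.

#include <QImage>

//clamp a value into the 0..255 byte range
static inline uchar clampByte(int v)
{
    return (uchar)(v < 0 ? 0 : (v > 255 ? 255 : v));
}

//convert a YUYV (YUV 4:2:2) frame to packed RGB24;
//each 4-byte group Y0 U Y1 V yields two RGB pixels
static void yuyvToRgb24(const uchar *yuyv, uchar *rgb, int width, int height)
{
    int pixels = width * height;
    for (int i = 0, j = 0; i < pixels * 2; i += 4, j += 6) {
        int y0 = yuyv[i + 0];
        int u  = yuyv[i + 1] - 128;
        int y1 = yuyv[i + 2];
        int v  = yuyv[i + 3] - 128;

        //integer approximation of the BT.601 YUV->RGB conversion
        int rDiff = (351 * v) >> 8;
        int gDiff = (179 * v + 86 * u) >> 8;
        int bDiff = (443 * u) >> 8;

        rgb[j + 0] = clampByte(y0 + rDiff);
        rgb[j + 1] = clampByte(y0 - gDiff);
        rgb[j + 2] = clampByte(y0 + bDiff);
        rgb[j + 3] = clampByte(y1 + rDiff);
        rgb[j + 4] = clampByte(y1 - gDiff);
        rgb[j + 5] = clampByte(y1 + bDiff);
    }
}

//wrap the RGB24 buffer in a QImage (deep copy so the buffer can be reused)
static QImage rgb24ToImage(const uchar *rgb, int width, int height)
{
    return QImage(rgb, width, height, width * 3, QImage::Format_RGB888).copy();
}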
2. Features
Real-time capture from USB cameras on Windows, Linux and embedded Linux.
Multi-threaded real-time capture from multiple USB cameras.
On embedded Linux devices, the USB device file is found and loaded automatically.
The device file name can also be set manually; once set, that device file is used for loading.
On embedded Linux devices, a face recognition interface is supported and face rectangles are drawn in real time.
Common controls such as open, pause, resume, close and snapshot.
Two OSD labels can be configured, each with its own text, color, font size, position and so on (see the OSD sketch after this list).
Can be used as a video surveillance system.
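For the OSD feature, a minimal sketch of how a label could be painted onto a captured frame with QPainter is shown below; the drawOsd function and its parameter list are illustrative assumptions, not the project's actual API.

#include <QImage>
#include <QPainter>
#include <QString>
#include <QColor>
#include <QPoint>

//draw one OSD text label onto a captured frame;
//the parameters mirror the feature list above (text, color, font size, position)
static void drawOsd(QImage &image, const QString &text,
                    const QColor &color, int pointSize, const QPoint &pos)
{
    QPainter painter(&image);
    QFont font = painter.font();
    font.setPointSize(pointSize);
    painter.setFont(font);
    painter.setPen(color);
    painter.drawText(pos, text);
}

//example call: drawOsd(image, "CAM-01", Qt::red, 12, QPoint(10, 25));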
3. Screenshots
4. Core Code
//system headers required by the V4L2 calls below; ImgBuffer (a struct with
//start/length members) and the TIMEMS logging macro come from the project's own headers
#ifdef Q_OS_LINUX
#include <fcntl.h>
#include <unistd.h>
#include <errno.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/select.h>
#include <linux/videodev2.h>
#endif

void CameraLinux::run()
{
    while (!stopped) {
        if (!cameraOk) {
            msleep(10);
            continue;
        }

        if (isPause) {
            //while paused, keep updating the timestamp so the stream is still treated as alive
            lastTime = QDateTime::currentDateTime();
            msleep(10);
            continue;
        }

        QImage image = readImage();
        if (!image.isNull()) {
            if (isSnap) {
                emit snapImage(image);
                isSnap = false;
            }

            if (findFaceOne) {
                findFace(image);
            }

            if (findFaceRect) {
                image = drawFace(image);
            }

            lastTime = QDateTime::currentDateTime();
            emit receiveImage(image);
        }

        msleep(interval);
    }

    this->closeCamera();
    this->initData();
}

QDateTime CameraLinux::getLastTime() const
{
    return this->lastTime;
}

QString CameraLinux::getCameraName() const
{
    return this->cameraName;
}

int CameraLinux::getCameraWidth() const
{
    return this->cameraWidth;
}

int CameraLinux::getCameraHeight() const
{
    return this->cameraHeight;
}

void CameraLinux::sleep(int msec)
{
    if (msec > 0) {
        QTime endTime = QTime::currentTime().addMSecs(msec);
        while (QTime::currentTime() < endTime) {
            QCoreApplication::processEvents(QEventLoop::AllEvents, 100);
        }
    }
}

void CameraLinux::initData()
{
    stopped = false;
    isPause = false;
    isSnap = false;
    cameraOk = false;
    cameraHwnd = -1;
    errorCount = 0;
}

void CameraLinux::readData()
{
    QStringList cameraNames;
    while (!process->atEnd()) {
        //read the command output line by line; entries starting with "video" are camera device files
        QString line = process->readLine();
        if (line.startsWith("video")) {
            line = line.replace("\n", "");
            cameraNames << QString("/dev/%1").arg(line);
        }
    }

    if (cameraNames.count() > 0) {
        cameraName = cameraNames.first();
        emit receiveCamera(cameraNames);
        qDebug() << TIMEMS << cameraNames;
    }
}

bool CameraLinux::initCamera()
{
    //if no device file name was specified (default "auto"), look one up
    if (cameraName == "auto") {
        findCamera();
    }

    //give the lookup some time to return a device file
    sleep(300);
    return openCamera();
}

void CameraLinux::findCamera()
{
    if (process->state() == QProcess::NotRunning) {
        process->start("ls /dev/");
    }
}

bool CameraLinux::openCamera()
{
#ifdef Q_OS_LINUX
    if (cameraName.length() > 5) {
        cameraHwnd = ::open(cameraName.toUtf8().data(), O_RDWR | O_NONBLOCK, 0);
    }

    if (cameraHwnd < 0) {
        qDebug() << TIMEMS << "open camera error";
        return false;
    }

    //query the device capabilities
    struct v4l2_capability capability;
    memset(&capability, 0, sizeof(capability));
    if (::ioctl(cameraHwnd, VIDIOC_QUERYCAP, &capability) < 0) {
        qDebug() << TIMEMS << "error in VIDIOC_QUERYCAP";
        ::close(cameraHwnd);
        return false;
    }

    if (!(capability.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
        qDebug() << TIMEMS << "it is not a video capture device";
        ::close(cameraHwnd);
        return false;
    }

    if (!(capability.capabilities & V4L2_CAP_STREAMING)) {
        qDebug() << TIMEMS << "it does not support streaming";
        ::close(cameraHwnd);
        return false;
    }

    if (capability.capabilities == 0x4000001) {
        qDebug() << TIMEMS << "capabilities" << "V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_STREAMING";
    }

    //select the video input source
    int input = 0;
    if (::ioctl(cameraHwnd, VIDIOC_S_INPUT, &input) < 0) {
        qDebug() << TIMEMS << "error in VIDIOC_S_INPUT";
        ::close(cameraHwnd);
        return false;
    }

    //set the pixel format and resolution
    struct v4l2_format format;
    memset(&format, 0, sizeof(format));
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    //other possible formats: V4L2_PIX_FMT_YUV420, V4L2_PIX_FMT_YUYV (4:2:2), V4L2_PIX_FMT_RGB565
    format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    //some hardware produces a corrupted picture unless this is set to V4L2_FIELD_NONE
    format.fmt.pix.field = V4L2_FIELD_INTERLACED;
    format.fmt.pix.width = cameraWidth;
    format.fmt.pix.height = cameraHeight;

    int bpp = 16;
    //format.fmt.pix.bytesperline = width * bpp / 8;
    //format.fmt.pix.sizeimage = cameraWidth * cameraHeight * bpp / 8;
    if (::ioctl(cameraHwnd, VIDIOC_S_FMT, &format) < 0) {
        ::close(cameraHwnd);
        return false;
    }

    //read the format and resolution back to check whether the settings took effect
    if (::ioctl(cameraHwnd, VIDIOC_G_FMT, &format) < 0) {
        qDebug() << TIMEMS << "error in VIDIOC_G_FMT";
        ::close(cameraHwnd);
        return false;
    }

    //print the negotiated width/height to see whether the request was honored
    struct v4l2_pix_format pix = format.fmt.pix;
    quint32 pixelformat = pix.pixelformat;
    qDebug() << TIMEMS << "cameraWidth" << cameraWidth << "cameraHeight" << cameraHeight << "width" << pix.width << "height" << pix.height;
    qDebug() << TIMEMS << "pixelformat" << QString("%1%2%3%4").arg(QChar(pixelformat & 0xFF)).arg(QChar((pixelformat >> 8) & 0xFF)).arg(QChar((pixelformat >> 16) & 0xFF)).arg(QChar((pixelformat >> 24) & 0xFF));

    //adopt the actual width/height reported by the driver
    cameraWidth = pix.width;
    cameraHeight = pix.height;

    //set the streaming parameters (frame rate)
    struct v4l2_streamparm streamparm;
    memset(&streamparm, 0, sizeof(streamparm));
    streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    streamparm.parm.capture.timeperframe.numerator = 1;
    streamparm.parm.capture.timeperframe.denominator = 25;
    streamparm.parm.capture.capturemode = 0;
    if (::ioctl(cameraHwnd, VIDIOC_S_PARM, &streamparm) < 0) {
        qDebug() << TIMEMS << "error in VIDIOC_S_PARM";
        ::close(cameraHwnd);
        return false;
    }

    if (::ioctl(cameraHwnd, VIDIOC_G_PARM, &streamparm) < 0) {
        qDebug() << TIMEMS << "error in VIDIOC_G_PARM";
        ::close(cameraHwnd);
        return false;
    }

    //request and manage the capture buffers
    struct v4l2_requestbuffers requestbuffers;
    memset(&requestbuffers, 0, sizeof(requestbuffers));
    requestbuffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    requestbuffers.memory = V4L2_MEMORY_MMAP;
    requestbuffers.count = 1;
    if (::ioctl(cameraHwnd, VIDIOC_REQBUFS, &requestbuffers) < 0) {
        qDebug() << TIMEMS << "error in VIDIOC_REQBUFS";
        ::close(cameraHwnd);
        return false;
    }

    buff_yuv422 = (uchar *)malloc(cameraWidth * cameraHeight * bpp / 8);
    buff_yuv420 = (uchar *)malloc(cameraWidth * cameraHeight * bpp / 8);
    buff_rgb24 = (uchar *)malloc(cameraWidth * cameraHeight * 24 / 8);

    buff_img = (ImgBuffer *)calloc(1, sizeof(ImgBuffer));
    if (buff_img == NULL) {
        qDebug() << TIMEMS << "error in calloc";
        ::close(cameraHwnd);
        return false;
    }

    struct v4l2_buffer buffer;
    for (int index = 0; index < 1; index++) {
        memset(&buffer, 0, sizeof(buffer));
        buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buffer.memory = V4L2_MEMORY_MMAP;
        buffer.index = index;
        if (::ioctl(cameraHwnd, VIDIOC_QUERYBUF, &buffer) < 0) {
            qDebug() << TIMEMS << "error in VIDIOC_QUERYBUF";
            ::free(buff_img);
            ::close(cameraHwnd);
            return false;
        }

        //map the driver buffer into user space
        buff_img[index].length = buffer.length;
        buff_img[index].start = (quint8 *)mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, MAP_SHARED, cameraHwnd, buffer.m.offset);
        if (MAP_FAILED == buff_img[index].start) {
            qDebug() << TIMEMS << "error in mmap";
            ::free(buff_img);
            ::close(cameraHwnd);
            return false;
        }

        //queue the buffer so the driver can fill it
        if (::ioctl(cameraHwnd, VIDIOC_QBUF, &buffer) < 0) {
            qDebug() << TIMEMS << "error in VIDIOC_QBUF";
            for (int i = 0; i <= index; i++) {
                munmap(buff_img[i].start, buff_img[i].length);
            }
            ::free(buff_img);
            ::close(cameraHwnd);
            return false;
        }
    }

    //start streaming
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (::ioctl(cameraHwnd, VIDIOC_STREAMON, &type) < 0) {
        qDebug() << TIMEMS << "error in VIDIOC_STREAMON";
        for (int i = 0; i < 1; i++) {
            munmap(buff_img[i].start, buff_img[i].length);
        }
        ::free(buff_img);
        ::close(cameraHwnd);
        return false;
    }

    cameraOk = true;
#endif
    qDebug() << TIMEMS << "open camera ok";
    return cameraOk;
}

void CameraLinux::closeCamera()
{
#ifdef Q_OS_LINUX
    if (cameraOk && buff_img != NULL) {
        //stop capturing
        enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (::ioctl(cameraHwnd, VIDIOC_STREAMOFF, &type) < 0) {
            qDebug() << TIMEMS << "error in VIDIOC_STREAMOFF";
        }

        //unmap the memory-mapped buffers
        for (int i = 0; i < 1; i++) {
            munmap(buff_img[i].start, buff_img[i].length);
        }

        //close the device file
        ::close(cameraHwnd);
        qDebug() << TIMEMS << "close camera ok";
    }

    //release resources
    ::free(buff_img);
    buff_img = NULL;
    ::free(buff_yuv422);
    buff_yuv422 = NULL;
    ::free(buff_yuv420);
    buff_yuv420 = NULL;
    ::free(buff_rgb24);
    buff_rgb24 = NULL;

    cameraOk = false;
    cameraHwnd = -1;
#endif
}

int CameraLinux::readFrame()
{
    int index = -1;
#ifdef Q_OS_LINUX
    //wait until the camera has captured a frame
    for (;;) {
        fd_set fds;
        struct timeval tv;
        FD_ZERO(&fds);
        FD_SET(cameraHwnd, &fds);
        tv.tv_sec = 2;
        tv.tv_usec = 0;
        int r = ::select(cameraHwnd + 1, &fds, NULL, NULL, &tv);
        if (-1 == r) {
            if (EINTR == errno) {
                continue;
            }
            return -1;
        } else if (0 == r) {
            //timeout
            return -1;
        } else {
            //a frame is ready, leave the loop
            break;
        }
    }

    //dequeue one buffer frame
    struct v4l2_buffer buffer;
    memset(&buffer, 0, sizeof(buffer));
    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buffer.memory = V4L2_MEMORY_MMAP;
    if (::ioctl(cameraHwnd, VIDIOC_DQBUF, &buffer) < 0) {
        qDebug() << TIMEMS << "error in VIDIOC_DQBUF";
        return -1;
    }

    memcpy(buff_yuv422, (uchar *)buff_img[buffer.index].start, buff_img[buffer.index].length);

    //re-queue the buffer so the driver can reuse it
    if (::ioctl(cameraHwnd, VIDIOC_QBUF, &buffer) < 0) {
        qDebug() << TIMEMS << "error in VIDIOC_QBUF";
        return -1;
    }

    index = buffer.index;
#endif
    return index;
}
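The run() loop at the top of the core code calls readImage(), which is not part of the excerpt. Below is a plausible sketch of it, assuming it simply combines readFrame() with a YUYV-to-RGB24 conversion such as the yuyvToRgb24 helper sketched earlier; this is a guess at the structure, not the author's original implementation.

//sketch only: assumes the yuyvToRgb24 helper from the earlier conversion sketch
QImage CameraLinux::readImage()
{
    QImage image;
#ifdef Q_OS_LINUX
    //wait for and dequeue one frame; readFrame() copies it into buff_yuv422
    int index = readFrame();
    if (index < 0) {
        return image;
    }

    //convert the YUYV frame to RGB24, then wrap it in a QImage (deep copy)
    yuyvToRgb24(buff_yuv422, buff_rgb24, cameraWidth, cameraHeight);
    image = QImage(buff_rgb24, cameraWidth, cameraHeight,
                   cameraWidth * 3, QImage::Format_RGB888).copy();
#endif
    return image;
}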