Qt Notes (40): Video Display with Qt and v4l2 on CentOS

2024-03-11 10:18


I. Introduction

 

v4l2 is a programming framework for driver-free UVC USB devices and is used here mainly to capture frames from a USB camera. The latest source (the two files v4l2.c and v4l2.h) can be downloaded from the Internet; the versions used in this article have been modified.
The running Qt interface updates continuously as frames arrive (screenshot omitted).

 

II. Details

1. Preparation

(1)Plug in the USB camera and check that the device file /dev/video0 exists.

This must match pd.dev_name = "/dev/video0"; in the code.
(2)Check the pixel format
Install the utilities with #yum install v4l-utils, then run #v4l2-ctl -d /dev/video0 --list-formats.

The reported pixel format is YUYV, which must match s->fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; in the code (a standalone check in C is sketched below).
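If v4l2-ctl is not available, the supported pixel formats can also be listed programmatically. The following is a minimal standalone sketch, not part of the original article, that enumerates them with VIDIOC_ENUM_FMT and assumes the device node is /dev/video0:

#include <stdio.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

int main(void)
{
    int fd = open("/dev/video0", O_RDWR);
    if (fd == -1) {
        perror("open");
        return 1;
    }
    struct v4l2_fmtdesc fmt;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    /* VIDIOC_ENUM_FMT returns one format description per index until EINVAL. */
    while (ioctl(fd, VIDIOC_ENUM_FMT, &fmt) == 0) {
        printf("format %u: %s\n", fmt.index, (char *)fmt.description);
        ++fmt.index;
    }
    close(fd);
    return 0;
}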
(3)Errors encountered
On CentOS 6.6 the following error occurred:

VIDIOC_STREAMON error 28, No space left on device

(errno 28 is ENOSPC.) The cause was never identified, and the following attempt did not help either:

#rmmod uvcvideo
#modprobe uvcvideo quirks=128

After switching to CentOS 6.3 the program ran without problems (it also runs correctly in a virtual machine).

 

2. Key points

(1)Convert the YUYV frames to RGB and display them in the UI (an earlier attempt that wrote MPEG data produced no display):

convert_yuv_to_rgb_buffer((unsigned char *)pd.buffers[pd.buf.index].start,bufrgb,640,480);
QImage image(bufrgb,640,480,QImage::Format_RGB888);
ui.displayLabel->setPixmap(QPixmap::fromImage(image));
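A note that is not in the original article: this QImage constructor wraps bufrgb directly rather than copying the pixel data, so bufrgb must stay valid at least until QPixmap::fromImage() has built the pixmap (which the code above guarantees, since the buffer is only overwritten on the next timer tick). If a detached copy is preferred, one possible variant is:

// force a deep copy so the pixmap no longer depends on bufrgb
ui.displayLabel->setPixmap(QPixmap::fromImage(image.copy()));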

(2)Save frames of the video stream to local files (the timestamp granularity is one second; for faster saving it can be reduced to milliseconds, as sketched after the snippet below)

if (bufrgb != NULL) {   /* the RGB buffer holds binary pixel data, so a strlen() check is not meaningful */
    tm_time = localtime(&now);
    char filename[30] = {0};
    sprintf(filename, "%4d-%02d-%02d_%02d.%02d.%02d.png",
            1900 + tm_time->tm_year, 1 + tm_time->tm_mon, tm_time->tm_mday,
            tm_time->tm_hour, tm_time->tm_min, tm_time->tm_sec);
    QImage image(bufrgb, 640, 480, QImage::Format_RGB888);
    image.save(filename);
}
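If millisecond resolution is needed, one possible sketch (not from the original code; it assumes gettimeofday() from <sys/time.h>) is:

struct timeval tv;
gettimeofday(&tv, NULL);                 /* seconds + microseconds since the epoch */
struct tm *t = localtime(&tv.tv_sec);
char filename[40] = {0};
snprintf(filename, sizeof(filename), "%04d-%02d-%02d_%02d.%02d.%02d.%03ld.png",
         1900 + t->tm_year, 1 + t->tm_mon, t->tm_mday,
         t->tm_hour, t->tm_min, t->tm_sec, (long)(tv.tv_usec / 1000));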

3. Complete code

(1)v4l2.h

#ifndef __V4L2_H__
#define __V4L2_H__

#include <stddef.h>              /* size_t */
#include <linux/types.h>
#include <linux/videodev2.h>

#ifdef __cplusplus
extern "C" {
#endif

typedef struct
{
    void   *start;
    size_t  length;
} buffer;

typedef struct
{
    int                 fd;
    int                 n_buffers;
    char               *dev_name;
    buffer             *buffers;
    struct v4l2_buffer  buf;
    struct v4l2_format  fmt;
} pass_data;

int  init_dev(pass_data *s);
void stop_dev(pass_data *s);
void read_frame(pass_data *s);
void return_data(pass_data *s);
void init_mmap(pass_data *s);
void init_device(pass_data *s);
int  open_device(pass_data *s);
void start_capturing(pass_data *s);
void close_device(pass_data *s);
void stop_capturing(pass_data *s);
void stop_device(pass_data *s);
void errno_exit(const char *s);
int  xioctl(int fd, int request, void *arg);
void process_image(void *p, pass_data *s, int i);
int  convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height);

#ifdef __cplusplus
}
#endif

#endif /* __V4L2_H__ */

(2)v4l2.c

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <errno.h>
#include <fcntl.h>
#include <unistd.h>
#include <malloc.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include "v4l2.h"

#define CLEAR(x) memset(&(x), 0, sizeof(x))

char *mpeg[] = {"./1.mpeg", "./2.mpeg", "./3.mpeg", "./4.mpeg", "./5.mpeg"};

int init_dev(pass_data *s)
{
    int flag = open_device(s);
    if (flag != 0) {
        return flag;
    }
    init_device(s);
    init_mmap(s);
    start_capturing(s);
    fprintf(stdout, "'%s' initialize finish ...\n", s->dev_name);
    return 0;
}

void stop_dev(pass_data *s)
{
    stop_capturing(s);
    stop_device(s);
    close_device(s);
    fprintf(stdout, "close '%s' ...\n", s->dev_name);
}

/* Dump one raw frame into the i-th ./N.mpeg file (not used by the Qt front end). */
void process_image(void *p, pass_data *s, int i)
{
    fputc('.', stdout);
    fflush(stdout);
    fprintf(stderr, "%s", mpeg[i]);
    int fd;
    if ((fd = open(mpeg[i], O_RDWR | O_CREAT | O_TRUNC, 0644)) == -1)
        errno_exit("open");
    if ((write(fd, (struct v4l2_buffer *)p, s->fmt.fmt.pix.sizeimage)) == -1)
        errno_exit("write");
    close(fd);
}

/* Dequeue one filled buffer; its index is left in s->buf for the caller. */
void read_frame(pass_data *s)
{
    CLEAR(s->buf);
    s->buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    s->buf.memory = V4L2_MEMORY_MMAP;
    if (xioctl(s->fd, VIDIOC_DQBUF, &s->buf) == -1) {
        switch (errno) {
        case EAGAIN:
            errno_exit("VIDIOC_DQBUF");
        case EIO:
            /* could ignore EIO, see spec. */
        default:
            errno_exit("VIDIOC_DQBUF");
        }
    }
    assert(s->buf.index < s->n_buffers);
}

/* Give the buffer back to the driver after the frame has been consumed. */
void return_data(pass_data *s)
{
    if (xioctl(s->fd, VIDIOC_QBUF, &s->buf) == -1)
        errno_exit("VIDIOC_QBUF");
}

void start_capturing(pass_data *s)
{
    unsigned int i;
    enum v4l2_buf_type type;

    for (i = 0; i < s->n_buffers; ++i) {
        struct v4l2_buffer buf;
        CLEAR(buf);
        buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index  = i;
        if (xioctl(s->fd, VIDIOC_QBUF, &buf) == -1)
            errno_exit("VIDIOC_QBUF");
    }
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (xioctl(s->fd, VIDIOC_STREAMON, &type))
        errno_exit("VIDIOC_STREAMON");
}

void stop_capturing(pass_data *s)
{
    enum v4l2_buf_type type;
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (xioctl(s->fd, VIDIOC_STREAMOFF, &type))
        errno_exit("VIDIOC_STREAMOFF");
}

/* Request kernel buffers and map them into user space. */
void init_mmap(pass_data *s)
{
    struct v4l2_requestbuffers req;
    CLEAR(req);
    req.count  = 20;
    req.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    if (xioctl(s->fd, VIDIOC_REQBUFS, &req)) {
        if (EINVAL == errno) {
            fprintf(stderr, "%s does not support 'memory mapping'\n", s->dev_name);
            exit(EXIT_FAILURE);
        } else {
            errno_exit("VIDIOC_REQBUFS");
        }
    }
    if (req.count < 2) {
        fprintf(stderr, "Insufficient buffer memory on %s\n", s->dev_name);
        exit(EXIT_FAILURE);
    }
    if ((s->buffers = (buffer *)calloc(req.count, sizeof(*s->buffers))) == NULL) {
        fprintf(stderr, "Out of memory\n");
        exit(EXIT_FAILURE);
    }
    for (s->n_buffers = 0; s->n_buffers < req.count; ++s->n_buffers) {
        struct v4l2_buffer buf;
        CLEAR(buf);
        buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index  = s->n_buffers;
        if (xioctl(s->fd, VIDIOC_QUERYBUF, &buf) == -1)
            errno_exit("VIDIOC_QUERYBUF");
        s->buffers[s->n_buffers].length = buf.length;
        s->buffers[s->n_buffers].start  = mmap(NULL, buf.length,
                                               PROT_READ | PROT_WRITE,
                                               MAP_SHARED, s->fd, buf.m.offset);
        if (s->buffers[s->n_buffers].start == MAP_FAILED)
            errno_exit("mmap");
#if _DEBUG_
        fprintf(stdout, "%d -> %p\n", s->n_buffers, s->buffers[s->n_buffers].start);
#endif
    }
}

/* Query capabilities and set the 640x480 YUYV capture format. */
void init_device(pass_data *s)
{
    struct v4l2_capability cap;
    struct v4l2_cropcap cropcap;
    struct v4l2_crop crop;
    unsigned int min;

    if (xioctl(s->fd, VIDIOC_QUERYCAP, &cap) == -1) {
        if (EINVAL == errno) {
            fprintf(stderr, "%s is no V4L2 device\n", s->dev_name);
            exit(EXIT_FAILURE);
        } else {
            errno_exit("VIDIOC_QUERYCAP");
        }
    }
    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
        fprintf(stderr, "%s is no video capture device\n", s->dev_name);
        exit(EXIT_FAILURE);
    }
    if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
        fprintf(stderr, "%s does not support streaming I/O\n", s->dev_name);
        exit(EXIT_FAILURE);
    }

    CLEAR(cropcap);
    cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (xioctl(s->fd, VIDIOC_CROPCAP, &cropcap) == 0) {
        crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        crop.c = cropcap.defrect;
        if (xioctl(s->fd, VIDIOC_S_CROP, &crop)) {
            switch (errno) {
            case EINVAL:
                break;
            default:
                break;
            }
        } else {
            /* Errors ignored */
        }
    }

    CLEAR(s->fmt);
    s->fmt.type                = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    s->fmt.fmt.pix.width       = 640;
    s->fmt.fmt.pix.height      = 480;
    s->fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    s->fmt.fmt.pix.field       = V4L2_FIELD_INTERLACED;
    if (xioctl(s->fd, VIDIOC_S_FMT, &s->fmt) == -1)
        errno_exit("VIDIOC_S_FMT");

    /* Buggy driver paranoia: YUYV uses 2 bytes per pixel. */
    min = s->fmt.fmt.pix.width * 2;
    if (s->fmt.fmt.pix.bytesperline < min)
        s->fmt.fmt.pix.bytesperline = min;
    min = s->fmt.fmt.pix.bytesperline * s->fmt.fmt.pix.height;
    if (s->fmt.fmt.pix.sizeimage < min)
        s->fmt.fmt.pix.sizeimage = min;
}

void stop_device(pass_data *s)
{
    unsigned int i;
    for (i = 0; i < s->n_buffers; ++i)
        if (munmap(s->buffers[i].start, s->buffers[i].length) == -1)
            errno_exit("munmap");
}

int open_device(pass_data *s)
{
    struct stat st;
    if (stat(s->dev_name, &st) == -1) {
        fprintf(stderr, "Can't identify '%s':[%d] %s\n", s->dev_name, errno, strerror(errno));
        return -1;
    }
    if (!S_ISCHR(st.st_mode)) {
        fprintf(stderr, "%s is no device\n", s->dev_name);
        return -2;
    }
    if ((s->fd = open(s->dev_name, O_RDWR, 0)) == -1) {
        fprintf(stderr, "Can't open '%s': error %d, %s\n", s->dev_name, errno, strerror(errno));
        return -3;   /* was -2; -3 matches the "can not open device" branch in the Qt code */
    }
    return 0;
}

void close_device(pass_data *s)
{
    close(s->fd);
}

int xioctl(int fd, int request, void *arg)
{
    int r;
    do
        r = ioctl(fd, request, arg);
    while (r == -1 && EINTR == errno);
    return r;
}

void errno_exit(const char *s)
{
    fprintf(stderr, "%s error %d, %s\n", s, errno, strerror(errno));
    exit(EXIT_FAILURE);
}

/* Convert one YUV triple to a packed RGB pixel. */
static int convert_yuv_to_rgb_pixel(int y, int u, int v)
{
    unsigned int pixel32 = 0;
    unsigned char *pixel = (unsigned char *)&pixel32;
    int r, g, b;
    r = y + (1.370705 * (v - 128));
    g = y - (0.698001 * (v - 128)) - (0.337633 * (u - 128));
    b = y + (1.732446 * (u - 128));
    if (r > 255) r = 255;
    if (g > 255) g = 255;
    if (b > 255) b = 255;
    if (r < 0) r = 0;
    if (g < 0) g = 0;
    if (b < 0) b = 0;
    pixel[0] = r * 220 / 256;
    pixel[1] = g * 220 / 256;
    pixel[2] = b * 220 / 256;
    return pixel32;
}

/* Convert a packed YUYV frame (2 bytes/pixel) to RGB888 (3 bytes/pixel). */
int convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height)
{
    unsigned int in, out = 0;
    unsigned int pixel_16;
    unsigned char pixel_24[3];
    unsigned int pixel32;
    int y0, u, y1, v;

    for (in = 0; in < width * height * 2; in += 4) {
        pixel_16 = yuv[in + 3] << 24 |
                   yuv[in + 2] << 16 |
                   yuv[in + 1] <<  8 |
                   yuv[in + 0];
        y0 = (pixel_16 & 0x000000ff);
        u  = (pixel_16 & 0x0000ff00) >>  8;
        y1 = (pixel_16 & 0x00ff0000) >> 16;
        v  = (pixel_16 & 0xff000000) >> 24;

        pixel32 = convert_yuv_to_rgb_pixel(y0, u, v);
        pixel_24[0] = (pixel32 & 0x000000ff);
        pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
        pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;
        rgb[out++] = pixel_24[0];
        rgb[out++] = pixel_24[1];
        rgb[out++] = pixel_24[2];

        pixel32 = convert_yuv_to_rgb_pixel(y1, u, v);
        pixel_24[0] = (pixel32 & 0x000000ff);
        pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
        pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;
        rgb[out++] = pixel_24[0];
        rgb[out++] = pixel_24[1];
        rgb[out++] = pixel_24[2];
    }
    return 0;
}

(3)videodisplay.h

#ifndef VIDEODISPLAY_H
#define VIDEODISPLAY_H

#include <QtGui>
#include "ui_dialog.h"
#include "v4l2.h"

class VideoDisplay : public QDialog
{
    Q_OBJECT

public:
    VideoDisplay(QWidget *parent = 0);
    ~VideoDisplay();

private slots:
    void beginCapture();
    void flushBuff();
    void savebmpData();

private:
    Ui::Dialog ui;
    pass_data pd;
    QTimer *timer;
    unsigned char *bufrgb;
};

#endif // VIDEODISPLAY_H

(4)videodisplay.cpp

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include "videodisplay.h"

VideoDisplay::VideoDisplay(QWidget *parent)
    : QDialog(parent), bufrgb(NULL)
{
    ui.setupUi(this);
    connect(ui.beginButton, SIGNAL(clicked()), this, SLOT(beginCapture()));
    connect(ui.saveButton, SIGNAL(clicked()), this, SLOT(savebmpData()));
    connect(ui.exitButton, SIGNAL(clicked()), this, SLOT(reject()));

    timer = new QTimer(this);
    timer->setInterval(10);
    connect(timer, SIGNAL(timeout()), this, SLOT(flushBuff()));

    pd.dev_name = "/dev/video0";
}

VideoDisplay::~VideoDisplay()
{
    if (timer->isActive()) {
        timer->stop();
    }
    if (bufrgb) {
        free(bufrgb);    /* release the RGB conversion buffer */
        bufrgb = NULL;
    }
}

void VideoDisplay::beginCapture()
{
    int flag = init_dev(&pd);
    if (flag == -1) {
        QMessageBox::information(this, tr("Tip"), tr("no device"));
        exit(1);
    } else if (flag == -2) {
        QMessageBox::information(this, tr("Tip"), tr("device is wrong"));
        exit(2);
    } else if (flag == -3) {
        QMessageBox::information(this, tr("Tip"), tr("can not open device"));
        exit(3);
    }
    timer->start();
    ui.beginButton->setDisabled(true);
}

void VideoDisplay::flushBuff()
{
    read_frame(&pd);
    if (!bufrgb) {
        bufrgb = (unsigned char *)malloc(640 * 480 * 3);
    }
    memset(bufrgb, 0, 640 * 480 * 3);
    convert_yuv_to_rgb_buffer((unsigned char *)pd.buffers[pd.buf.index].start, bufrgb, 640, 480);
    QImage image(bufrgb, 640, 480, QImage::Format_RGB888);
    ui.displayLabel->setPixmap(QPixmap::fromImage(image));
    return_data(&pd);
}

void VideoDisplay::savebmpData()
{
    time_t now;
    struct tm *tm_time;
    time(&now);
    if (bufrgb != NULL) {   /* the RGB buffer holds binary pixel data, so a strlen() check is not meaningful */
        tm_time = localtime(&now);
        char filename[30] = {0};
        sprintf(filename, "%4d-%02d-%02d_%02d.%02d.%02d.png",
                1900 + tm_time->tm_year, 1 + tm_time->tm_mon, tm_time->tm_mday,
                tm_time->tm_hour, tm_time->tm_min, tm_time->tm_sec);
        QImage image(bufrgb, 640, 480, QImage::Format_RGB888);
        image.save(filename);
    }
}

(5)Build and run
 
Click BEGIN to start the display; click SAVE to write a PNG snapshot of the currently displayed frame into the executable's directory, e.g. 2016-01-20_17.28.36.png and 2016-01-20_17.36.08.png.
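main.cpp is not listed in the article; a minimal sketch, assuming a qmake project in which dialog.ui generates the Ui::Dialog class used by videodisplay.h, could look like this:

#include <QApplication>
#include "videodisplay.h"

int main(int argc, char *argv[])
{
    QApplication app(argc, argv);
    VideoDisplay w;      // the dialog defined in videodisplay.h
    w.show();
    return app.exec();
}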

 

III. Summary

 

(1)v4l2 behaves differently on different systems; adjusting parameters solves some of the problems, but others could not be resolved here due to limited ability.
(2)This article is only the starting point of an earlier graduation project; the follow-up work covers image conversion and processing and a series of automatic recognition features, after which the captured images can be stored in a database.
(3)Questions and suggestions are welcome in the comments. Thanks!
