Keywords: ALIENTEK (正点原子), i.MX6UL, v4l2, framebuffer
- Host environment: Ubuntu 16.04 64-bit
- Hardware: ALIENTEK (正点原子) i.MX6UL eMMC board
Firmware thread on this site: [[ALIENTEK i.MX6UL board] Building u-boot, Linux and a buildroot rootfs](https://whycan.cn/t_3550.html)
---
### Introduction
2020 is destined to be an extraordinary year; let's just treat it as a paper tiger.
The first paper tiger I ran into was v4l2.
Stuck at home, I decided to play with a USB camera; the goal is to preview the camera feed in real time on the LCD.
After some searching I learned that the USB camera is driven through v4l2, and the first hurdle was that "v4l2" never rolls off the tongue...
Skipping past that hurdle, I settled on v4l2 + framebuffer to preview the USB camera feed.
Following the "put the elephant in the fridge" recipe, I split the implementation into three steps to lower the difficulty and build confidence.
This post covers the first step: capturing a frame and saving it as a photo.
### Main part
#### 1. Basic concepts
Keeping the concepts short so they don't eat up space; this post mainly records the hands-on process.
- USB cameras follow the UVC (USB Video Class) standard, so no extra driver is needed.
- v4l2 (Video for Linux 2) is the video-device programming API provided by the Linux kernel.
- USB cameras generally support outputting YUV data.
- YUV is a color space; it usually has to be converted to RGB before use (saving as BMP, displaying on the LCD, etc.).
- YUYV is one packed YUV format, also known as YUV 4:2:2 (see the layout sketch after this list).
- A BMP image holds RGB (actually BGR-ordered) pixel data, with the rows stored upside down (bottom-up).
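To make the YUYV layout concrete, here is a minimal sketch (my own illustration, not taken from the code below) of how two pixels pack into four bytes and the frame-size arithmetic that follows:
// YUYV (packed YUV 4:2:2): each pair of horizontal pixels shares one U and one V.
// Byte order of one macropixel (4 bytes): Y0 U0 Y1 V0
//   pixel 0 = (Y0, U0, V0)
//   pixel 1 = (Y1, U0, V0)
// So a W x H frame occupies W * H * 2 bytes, while the same frame in RGB888 needs W * H * 3 bytes (the 2:3 ratio used later).
#define CAM_WIDTH (640)
#define CAM_HEIGHT (480)
#define YUYV_FRAME_SIZE (CAM_WIDTH * CAM_HEIGHT * 2) // 614400 bytes
#define RGB888_FRAME_SIZE (CAM_WIDTH * CAM_HEIGHT * 3) // 921600 bytes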
#### 2. Step breakdown
The v4l2 capture flow for a USB camera is fairly easy to follow. Roughly: open the camera and set its parameters, request and set up the buffers, then read one frame at a time from the buffer queue.
##### s0. Core commands
v4l2 is driven almost entirely by calling ioctl with a series of VIDIOC_XXXXXX requests, which is quite friendly.
#include <sys/ioctl.h>
#include <linux/videodev2.h>
##### s1. Open the camera
As the saying goes, everything is a file.
#include <fcntl.h>
fd = open("/dev/videox", O_RDWR);
##### s2. Query capabilities
ioctl(fd, VIDIOC_QUERYCAP, &cap);
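A minimal fragment of this step with the error check spelled out (assuming fd is the camera opened in s1; besides the headers listed under s0 it also needs <stdio.h> and <string.h>):
struct v4l2_capability cap;
memset(&cap, 0, sizeof(cap));
if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
    perror("VIDIOC_QUERYCAP");
    return -1;
}
printf("driver: %s, card: %s\n", cap.driver, cap.card);
// make sure the device can actually capture and stream
if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) || !(cap.capabilities & V4L2_CAP_STREAMING)) {
    printf("not a capture/streaming device\n");
    return -1;
}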
##### s3. Set parameters
Output format, resolution, frame rate, and so on.
ioctl(fd, VIDIOC_S_FMT, &fmt);
ioctl(fd, VIDIOC_S_PARM, &setfps);
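A minimal fragment showing how the two structures are typically filled before the calls above (assuming fd from s1; 640x480 YUYV at 25 fps, the same values code 2 below uses):
struct v4l2_format fmt;
struct v4l2_streamparm setfps;

memset(&fmt, 0, sizeof(fmt));
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width = 640;
fmt.fmt.pix.height = 480;
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
fmt.fmt.pix.field = V4L2_FIELD_ANY;
if (ioctl(fd, VIDIOC_S_FMT, &fmt) < 0)
    perror("VIDIOC_S_FMT"); // the driver may adjust width/height to the nearest supported size

memset(&setfps, 0, sizeof(setfps));
setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
setfps.parm.capture.timeperframe.numerator = 1; // 1/25 second per frame
setfps.parm.capture.timeperframe.denominator = 25; // i.e. 25 fps
if (ioctl(fd, VIDIOC_S_PARM, &setfps) < 0)
    perror("VIDIOC_S_PARM");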
##### s4. Set up the buffers
- Roughly speaking, this configures a FIFO (a first-in-first-out ring of buffers).
- The buffers are mmap'ed into user space.
- The core operations revolve around the index member of struct v4l2_buffer (a setup sketch follows the snippet below).
struct v4l2_buffer buf;
// dequeue a filled buffer
ioctl(fd, VIDIOC_DQBUF, &buf);
// queue the buffer back
ioctl(fd, VIDIOC_QBUF, &buf);
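The snippet above only shows the dequeue/enqueue pair. A minimal sketch of the setup that has to happen first (request the buffers, query each one, mmap it, then queue it; fd and NB_BUFFER are assumed to match code 2 below, and <sys/mman.h> and <string.h> are needed in addition to the s0 headers):
#define NB_BUFFER 4
static void* mem[NB_BUFFER]; // user-space addresses of the driver's buffers

static int setup_buffers(int fd)
{
    struct v4l2_requestbuffers rb;
    struct v4l2_buffer buf;
    int i;
    // ask the driver for NB_BUFFER mmap-able capture buffers
    memset(&rb, 0, sizeof(rb));
    rb.count = NB_BUFFER;
    rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    rb.memory = V4L2_MEMORY_MMAP;
    if (ioctl(fd, VIDIOC_REQBUFS, &rb) < 0)
        return -1;
    for (i = 0; i < NB_BUFFER; i++) {
        // find out where buffer i lives, then map it into user space
        memset(&buf, 0, sizeof(buf));
        buf.index = i;
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        if (ioctl(fd, VIDIOC_QUERYBUF, &buf) < 0)
            return -1;
        mem[i] = mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset);
        if (mem[i] == MAP_FAILED)
            return -1;
        // hand the empty buffer to the driver so it can be filled
        if (ioctl(fd, VIDIOC_QBUF, &buf) < 0)
            return -1;
    }
    return 0;
}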
##### s5. Start/stop capture
int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
// start streaming
ioctl (fd, VIDIOC_STREAMON, &type);
// stop streaming
ioctl (fd, VIDIOC_STREAMOFF, &type);
##### s6. Capture one frame
ioctl(fd, VIDIOC_DQBUF, &buf);
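Putting s4 and s6 together, the per-frame cycle looks roughly like this (a fragment, assuming mem[] was filled by the s4 setup and framebuffer/framesize are the copy destination used in code 2):
struct v4l2_buffer buf;
memset(&buf, 0, sizeof(buf));
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
if (ioctl(fd, VIDIOC_DQBUF, &buf) < 0) // take one filled buffer out of the queue
    return -1;
memcpy(framebuffer, mem[buf.index], framesize); // buf.index tells us which mapped buffer holds the frame
ioctl(fd, VIDIOC_QBUF, &buf); // give the buffer back to the driver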
##### s7. Frame processing
Convert the frame to RGB, then save it as a BMP image to take the photo.
#### 3. Code
The v4l2 capture code mainly references the open-source project MJPG-streamer, typed out entirely by hand.
- Code 1
This program queries the USB camera's information; think of it as the v4l2 "hello world".
/*
* @FilePath: /v4l2-fb-uvc-code/v4l2grap-uvc-getinfo.c
* @Version: 1.0.0
* @Author: zys
* @LastAuthor : zys
* @CreationDate: 2020-02-04 11:33:25
* @LastEditTime : 2020-02-05 17:57:18
* @Description : Query the USB camera's information; the v4l2 "hello world".
*/
/*----------------------------------------------------------------------------*/
/* change log ----------------------------------------------------------------*/
/*
-0205
Query USB camera info: OK.
*/
/*----------------------------------------------------------------------------*/
/* Includes ------------------------------------------------------------------*/
#include <fcntl.h>
#include <linux/videodev2.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>
/* Private define ------------------------------------------------------------*/
#define _UVC_CAM_HEIGHT (480)
#define _UVC_CAM_WIDTH (640)
/* Private macro -------------------------------------------------------------*/
/* Private typedef -----------------------------------------------------------*/
struct vdIn {
int fd;
char* videodevice;
// v4l2
struct v4l2_capability cap;
struct v4l2_format fmt;
struct v4l2_fmtdesc fmtdesc;
struct v4l2_streamparm setfps;
struct v4l2_requestbuffers rb;
int width;
int height;
int fps;
};
/* Private function prototypes -----------------------------------------------*/
/* Private variables ---------------------------------------------------------*/
static struct vdIn uvc_cam;
/* Global variables ---------------------------------------------------------*/
/* Private functions ---------------------------------------------------------*/
int v4l2_init(void)
{
int ret;
// 1. open cam
if ((uvc_cam.fd = open(uvc_cam.videodevice, O_RDWR)) == -1) {
printf("ERROR opening V4L interface\n");
return -1;
}
// 2. querycap
memset(&uvc_cam.cap, 0, sizeof(struct v4l2_capability));
ret = ioctl(uvc_cam.fd, VIDIOC_QUERYCAP, &uvc_cam.cap);
if (ret < 0) {
printf("Error opening device %s: unable to query device.\n", uvc_cam.videodevice);
return -1;
}
else {
printf("driver:\t\t%s\n", uvc_cam.cap.driver);
printf("card:\t\t%s\n", uvc_cam.cap.card);
printf("bus_info:\t%s\n", uvc_cam.cap.bus_info);
printf("version:\t%d\n", uvc_cam.cap.version);
printf("capabilities:\t%x\n", uvc_cam.cap.capabilities);
if ((uvc_cam.cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == V4L2_CAP_VIDEO_CAPTURE) {
printf("%s: \tsupports capture.\n", uvc_cam.videodevice);
}
if ((uvc_cam.cap.capabilities & V4L2_CAP_STREAMING) == V4L2_CAP_STREAMING) {
printf("%s: \tsupports streaming.\n", uvc_cam.videodevice);
}
}
// 3. set format in
// 3.1 enum fmt
printf("\nSupport format:\n");
memset(&uvc_cam.fmtdesc, 0, sizeof(struct v4l2_fmtdesc));
uvc_cam.fmtdesc.index = 0;
uvc_cam.fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
while (ioctl(uvc_cam.fd, VIDIOC_ENUM_FMT, &uvc_cam.fmtdesc) != -1) {
printf("\t%d.%s\n", uvc_cam.fmtdesc.index + 1, uvc_cam.fmtdesc.description);
uvc_cam.fmtdesc.index++;
}
// 3.2 set fmt
memset(&uvc_cam.fmt, 0, sizeof(struct v4l2_format));
uvc_cam.fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
uvc_cam.fmt.fmt.pix.width = uvc_cam.width;
uvc_cam.fmt.fmt.pix.height = uvc_cam.height;
uvc_cam.fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
uvc_cam.fmt.fmt.pix.field = V4L2_FIELD_ANY;
ret = ioctl(uvc_cam.fd, VIDIOC_S_FMT, &uvc_cam.fmt);
if (ret < 0) {
printf("Unable to set format\n");
return -1;
}
// 3.3 get fmt
ret = ioctl(uvc_cam.fd, VIDIOC_G_FMT, &uvc_cam.fmt);
if (ret < 0) {
printf("Unable to get format\n");
return -1;
}
else {
printf("\nfmt.type:\t\t%d\n", uvc_cam.fmt.type);
printf("pix.pixelformat:\t%c%c%c%c\n", uvc_cam.fmt.fmt.pix.pixelformat & 0xFF, (uvc_cam.fmt.fmt.pix.pixelformat >> 8) & 0xFF, (uvc_cam.fmt.fmt.pix.pixelformat >> 16) & 0xFF, (uvc_cam.fmt.fmt.pix.pixelformat >> 24) & 0xFF);
printf("pix.height:\t\t%d\n", uvc_cam.fmt.fmt.pix.height);
printf("pix.width:\t\t%d\n", uvc_cam.fmt.fmt.pix.width);
printf("pix.field:\t\t%d\n", uvc_cam.fmt.fmt.pix.field);
}
// 4. set fps
memset(&uvc_cam.setfps, 0, sizeof(struct v4l2_streamparm));
uvc_cam.setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
uvc_cam.setfps.parm.capture.timeperframe.numerator = 1;
uvc_cam.setfps.parm.capture.timeperframe.denominator = 25;
ret = ioctl(uvc_cam.fd, VIDIOC_S_PARM, &uvc_cam.setfps);
if (ret < 0) {
printf("Unable to set frame rate\n");
return -1;
}
else {
printf("set fps OK!\n");
}
ret = ioctl(uvc_cam.fd, VIDIOC_G_PARM, &uvc_cam.setfps);
if (ret < 0) {
printf("Unable to get frame rate\n");
return -1;
}
else {
printf("get fps OK:\n");
printf("timeperframe.numerator : %d\n", uvc_cam.setfps.parm.capture.timeperframe.numerator);
printf("timeperframe.denominator: %d\n", uvc_cam.setfps.parm.capture.timeperframe.denominator);
printf("set fps : %d\n", 1 * uvc_cam.setfps.parm.capture.timeperframe.denominator / uvc_cam.setfps.parm.capture.timeperframe.numerator);
}
// 5. enum framesizes
int i = 0;
while (1) {
struct v4l2_fmtdesc fmtdesc;
memset(&fmtdesc, 0, sizeof(struct v4l2_fmtdesc));
fmtdesc.index = i++;
fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(uvc_cam.fd, VIDIOC_ENUM_FMT, &fmtdesc) < 0) {
break;
}
printf("Supported format: %s\n", fmtdesc.description);
struct v4l2_frmsizeenum fsenum;
memset(&fsenum, 0, sizeof(struct v4l2_frmsizeenum));
fsenum.pixel_format = fmtdesc.pixelformat;
int j = 0;
while (1) {
fsenum.index = j;
j++;
if (ioctl(uvc_cam.fd, VIDIOC_ENUM_FRAMESIZES, &fsenum) == 0) {
if (uvc_cam.fmt.fmt.pix.pixelformat == fmtdesc.pixelformat) {
printf("\tSupported size with the current format: %dx%d\n", fsenum.discrete.width, fsenum.discrete.height);
}
else {
printf("\tSupported size: %dx%d\n", fsenum.discrete.width, fsenum.discrete.height);
}
}
else {
break;
}
}
}
printf("init %s \t[OK]\n", uvc_cam.videodevice);
return 0;
}
void v4l2_exit(void)
{
close(uvc_cam.fd);
}
int main(int argc, char const* argv[])
{
char vdname[15];
printf("\n----- v4l2 app start ----- \n");
if (argc < 2) {
printf("need:/dev/videox\n");
printf("like:./av4l2grap-uvc-x /dev/video1\n");
printf("app exit.\n\n");
exit(1);
}
snprintf(vdname, sizeof(vdname), "%s", argv[1]);
memset(&uvc_cam, 0, sizeof(struct vdIn));
uvc_cam.videodevice = vdname;
printf("using: \t\t%s\n", uvc_cam.videodevice);
uvc_cam.width = _UVC_CAM_WIDTH;
uvc_cam.height = _UVC_CAM_HEIGHT;
if (v4l2_init() < 0) {
goto app_exit;
}
app_exit:
printf("app exit.\n\n");
v4l2_exit();
return 0;
}
/*************************** (C) COPYRIGHT 2020 ZYS ************END OF FILE****/
- Code 2
The photo-capture code is not finished yet; it will be added in a later update~
#### 4. Testing the program
- Build
arm-buildroot-linux-gnueabihf-gcc -o av4l2grap-uvc-getinfo-x v4l2grap-uvc-getinfo.c
- Identify the USB camera device node on the board
Important: remember to plug in the USB camera!
# ls /dev/video*
/dev/video0 /dev/video1 /dev/video2
# cat /sys/class/video4linux/video1/name
Aoni HD Camera
#
- Run
Copy the program over to the board first (NFS is recommended).
Run it:
# ./av4l2grap-uvc-getinfo-x /dev/video1
----- v4l2 app start -----
using: /dev/video1
driver: uvcvideo
card: Aoni HD Camera
bus_info: usb-ci_hdrc.1-1.4
version: 262415
capabilities: 84200001
/dev/video1: supports capture.
/dev/video1: supports streaming.
Support format:
1.MJPEG
2.YUV 4:2:2 (YUYV)
fmt.type: 1
pix.pixelformat: YUYV
pix.height: 480
pix.width: 640
pix.field: 1
set fps OK!
get fps OK:
timeperframe.numerator : 1
timeperframe.denominator: 25
set fps : 25
Supported format: MJPEG
Supported size: 640x480
Supported size: 1280x720
Supported size: 1184x656
Supported size: 1024x576
Supported size: 960x720
Supported size: 960x540
Supported size: 864x486
Supported size: 800x600
Supported size: 752x423
Supported size: 640x360
Supported size: 320x240
Supported format: YUV 4:2:2 (YUYV)
Supported size with the current format: 640x480
Supported size with the current format: 1280x720
Supported size with the current format: 1184x656
Supported size with the current format: 1024x576
Supported size with the current format: 960x720
Supported size with the current format: 960x540
Supported size with the current format: 864x486
Supported size with the current format: 800x600
Supported size with the current format: 752x423
Supported size with the current format: 640x360
Supported size with the current format: 320x240
init /dev/video1 [OK]
app exit.
#
#### Photo capture code tested OK
0207
---
#### Code 2: implementing photo capture
After running the program, uvc_grap.bmp is generated in the current directory; copy it back to Ubuntu to view the picture.
- Key points
Mostly my own understanding; a conversion sketch follows this list.
1. YUYV vs RGB888:
In raw YUYV data, two pixels share one pair of U/V components, so two pixels occupy 4 bytes;
RGB888, as everyone knows, uses 3 bytes per pixel, so two pixels occupy 6 bytes.
So the byte ratio of YUYV to RGB888 is 4:6, i.e. 2:3.
With that, the data buffer sizes can be defined:
// buf for yuyv
uvc_cam.framesizeIn = uvc_cam.width * uvc_cam.height << 1; // w * h * 2
uvc_cam.framebuffer = (unsigned char*)calloc(1, (size_t)uvc_cam.framesizeIn);
// buf for rgb888
static unsigned char rgb888_buffer[IMAGEWIDTH * IMAGEHEIGHT * 3];
2. BMP:
There are plenty of introductions to this basic image format online, so I won't repeat them; the key point is that the pixel rows are stored upside down (bottom-up).
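For reference, a minimal per-macropixel conversion sketch using the standard BT.601 full-range approximation (the coefficients are the textbook ones; yuyv_to_rgb888() in the source below inlines the same math across the whole frame, writes the bytes in BGR order for the BMP, and flips the rows bottom-up):
// R = Y + 1.402 * (V - 128)
// G = Y - 0.344 * (U - 128) - 0.714 * (V - 128)
// B = Y + 1.772 * (U - 128)
static unsigned char clamp_u8(int x)
{
    return (unsigned char)(x < 0 ? 0 : (x > 255 ? 255 : x));
}

// convert one YUYV macropixel (Y0 U Y1 V) into two RGB888 pixels
static void yuyv_pair_to_rgb888(const unsigned char yuyv[4], unsigned char rgb[6])
{
    int y0 = yuyv[0], u = yuyv[1], y1 = yuyv[2], v = yuyv[3];
    double rbase = 1.402 * (v - 128);
    double gbase = 0.34414 * (u - 128) + 0.71414 * (v - 128);
    double bbase = 1.772 * (u - 128);
    rgb[0] = clamp_u8((int)(y0 + rbase));
    rgb[1] = clamp_u8((int)(y0 - gbase));
    rgb[2] = clamp_u8((int)(y0 + bbase));
    rgb[3] = clamp_u8((int)(y1 + rbase));
    rgb[4] = clamp_u8((int)(y1 - gbase));
    rgb[5] = clamp_u8((int)(y1 + bbase));
}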
- Source code:
/*
* @FilePath: /v4l2-fb-uvc-code/v4l2grap-uvc-savebmp.c
* @Version: 1.0.0
* @Author: zys
* @LastAuthor : zys
* @CreationDate: 2020-02-04 11:33:25
* @LastEditTime : 2020-02-07 11:08:21
* @Description : Capture one frame from the USB camera and save it as a BMP image.
*/
/*----------------------------------------------------------------------------*/
/* change log ----------------------------------------------------------------*/
/*
-0205
Query USB camera info: OK.
-0207
Implemented photo capture: after running, uvc_grap.bmp is generated in the current directory; copy it back to Ubuntu to view the picture.
*/
/*----------------------------------------------------------------------------*/
/* Includes ------------------------------------------------------------------*/
#include <fcntl.h>
#include <linux/videodev2.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>
/* Private define ------------------------------------------------------------*/
#define _UVC_CAM_HEIGHT (480)
#define _UVC_CAM_WIDTH (640)
#define IMAGEHEIGHT _UVC_CAM_HEIGHT
#define IMAGEWIDTH _UVC_CAM_WIDTH
#define NB_BUFFER 4
/* Private macro -------------------------------------------------------------*/
/* Private typedef -----------------------------------------------------------*/
struct vdIn {
int fd;
char* videodevice;
// v4l2
struct v4l2_capability cap;
struct v4l2_format fmt;
struct v4l2_fmtdesc fmtdesc;
struct v4l2_streamparm setfps;
struct v4l2_requestbuffers rb;
void* mem[NB_BUFFER];
int memlength[NB_BUFFER];
unsigned char* framebuffer;
int framesizeIn;
int width;
int height;
int fps;
FILE* fp_bmp;
};
/* __attribute__((packed)) tells the compiler not to insert alignment padding inside the structure */
// 14-byte file header
typedef struct
{
unsigned char cfType[2]; // file type, "BM" (0x4D42)
unsigned int cfSize; // file size in bytes
unsigned int cfReserved; // reserved, must be 0
unsigned int cfoffBits; // offset of the pixel data from the start of the file, in bytes
} __attribute__((packed)) BITMAPFILEHEADER;
// 40-byte info header
typedef struct
{
unsigned int ciSize; // size of this BITMAPINFOHEADER in bytes (40)
unsigned int ciWidth; // image width
unsigned int ciHeight; // image height
unsigned short int ciPlanes; // number of bit planes on the target device, must be 1
unsigned short int ciBitCount; // bits per pixel
char ciCompress[4]; // compression type
unsigned int ciSizeImage; // image size in bytes, must be a multiple of 4
unsigned int ciXPelsPerMeter; // horizontal resolution, pixels per meter
unsigned int ciYPelsPerMeter; // vertical resolution, pixels per meter
unsigned int ciClrUsed; // number of palette colors used
unsigned int ciClrImportant; // number of important colors; 0 (or the full color count) means all colors matter
} __attribute__((packed)) BITMAPINFOHEADER;
typedef struct
{
unsigned char blue;
unsigned char green;
unsigned char red;
} __attribute__((packed)) PIXEL; // RGB color (stored as blue, green, red)
/* Private function prototypes -----------------------------------------------*/
/* Private variables ---------------------------------------------------------*/
static struct vdIn uvc_cam;
static unsigned char rgb888_buffer[IMAGEWIDTH * IMAGEHEIGHT * 3];
/* Global variables ---------------------------------------------------------*/
/* Private functions ---------------------------------------------------------*/
void yuyv_to_rgb888(void)
{
int i, j;
unsigned char y1, y2, u, v;
int r1, g1, b1, r2, g2, b2;
unsigned char* pointer;
double rbase = 0;
double gbase = 0;
double bbase = 0;
pointer = uvc_cam.framebuffer;
for (i = 0; i < IMAGEHEIGHT; i++) {
for (j = 0; j < (IMAGEWIDTH / 2); j++) {
y1 = *(pointer + ((i * (IMAGEWIDTH / 2) + j) << 2));
u = *(pointer + ((i * (IMAGEWIDTH / 2) + j) << 2) + 1);
y2 = *(pointer + ((i * (IMAGEWIDTH / 2) + j) << 2) + 2);
v = *(pointer + ((i * (IMAGEWIDTH / 2) + j) << 2) + 3);
rbase = 1.402 * (v - 128);
gbase = 0.34414 * (u - 128) - 0.71414 * (v - 128);
bbase = 1.772 * (u - 128);
r1 = y1 + rbase;
g1 = y1 - gbase;
b1 = y1 + bbase;
r2 = y2 + rbase;
g2 = y2 - gbase;
b2 = y2 + bbase;
if (r1 > 255)
r1 = 255;
else if (r1 < 0)
r1 = 0;
if (b1 > 255)
b1 = 255;
else if (b1 < 0)
b1 = 0;
if (g1 > 255)
g1 = 255;
else if (g1 < 0)
g1 = 0;
if (r2 > 255)
r2 = 255;
else if (r2 < 0)
r2 = 0;
if (b2 > 255)
b2 = 255;
else if (b2 < 0)
b2 = 0;
if (g2 > 255)
g2 = 255;
else if (g2 < 0)
g2 = 0;
*(rgb888_buffer + ((IMAGEHEIGHT - 1 - i) * (IMAGEWIDTH / 2) + j) * 6) = (unsigned char)b1;
*(rgb888_buffer + ((IMAGEHEIGHT - 1 - i) * (IMAGEWIDTH / 2) + j) * 6 + 1) = (unsigned char)g1;
*(rgb888_buffer + ((IMAGEHEIGHT - 1 - i) * (IMAGEWIDTH / 2) + j) * 6 + 2) = (unsigned char)r1;
*(rgb888_buffer + ((IMAGEHEIGHT - 1 - i) * (IMAGEWIDTH / 2) + j) * 6 + 3) = (unsigned char)b2;
*(rgb888_buffer + ((IMAGEHEIGHT - 1 - i) * (IMAGEWIDTH / 2) + j) * 6 + 4) = (unsigned char)g2;
*(rgb888_buffer + ((IMAGEHEIGHT - 1 - i) * (IMAGEWIDTH / 2) + j) * 6 + 5) = (unsigned char)r2;
}
}
printf("yuyv to rgb888 done\n");
}
int v4l2_init(void)
{
int i = 0;
int ret;
struct v4l2_buffer buf;
// 1. open cam
if ((uvc_cam.fd = open(uvc_cam.videodevice, O_RDWR)) == -1) {
printf("ERROR opening V4L interface\n");
return -1;
}
// 2. querycap
memset(&uvc_cam.cap, 0, sizeof(struct v4l2_capability));
ret = ioctl(uvc_cam.fd, VIDIOC_QUERYCAP, &uvc_cam.cap);
if (ret < 0) {
printf("Error opening device %s: unable to query device.\n", uvc_cam.videodevice);
return -1;
}
else {
printf("driver:\t\t%s\n", uvc_cam.cap.driver);
printf("card:\t\t%s\n", uvc_cam.cap.card);
printf("bus_info:\t%s\n", uvc_cam.cap.bus_info);
printf("version:\t%d\n", uvc_cam.cap.version);
printf("capabilities:\t%x\n", uvc_cam.cap.capabilities);
if ((uvc_cam.cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == V4L2_CAP_VIDEO_CAPTURE) {
printf("%s: \tsupports capture.\n", uvc_cam.videodevice);
}
if ((uvc_cam.cap.capabilities & V4L2_CAP_STREAMING) == V4L2_CAP_STREAMING) {
printf("%s: \tsupports streaming.\n", uvc_cam.videodevice);
}
}
// 3. set format in
// 3.1 enum fmt
printf("\nSupport format:\n");
memset(&uvc_cam.fmtdesc, 0, sizeof(struct v4l2_fmtdesc));
uvc_cam.fmtdesc.index = 0;
uvc_cam.fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
while (ioctl(uvc_cam.fd, VIDIOC_ENUM_FMT, &uvc_cam.fmtdesc) != -1) {
printf("\t%d.%s\n", uvc_cam.fmtdesc.index + 1, uvc_cam.fmtdesc.description);
uvc_cam.fmtdesc.index++;
}
// 3.2 set fmt
memset(&uvc_cam.fmt, 0, sizeof(struct v4l2_format));
uvc_cam.fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
uvc_cam.fmt.fmt.pix.width = uvc_cam.width;
uvc_cam.fmt.fmt.pix.height = uvc_cam.height;
uvc_cam.fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
uvc_cam.fmt.fmt.pix.field = V4L2_FIELD_ANY;
ret = ioctl(uvc_cam.fd, VIDIOC_S_FMT, &uvc_cam.fmt);
if (ret < 0) {
printf("Unable to set format\n");
return -1;
}
// 3.3 get fmt
ret = ioctl(uvc_cam.fd, VIDIOC_G_FMT, &uvc_cam.fmt);
if (ret < 0) {
printf("Unable to get format\n");
return -1;
}
else {
printf("\nfmt.type:\t\t%d\n", uvc_cam.fmt.type);
printf("pix.pixelformat:\t%c%c%c%c\n", uvc_cam.fmt.fmt.pix.pixelformat & 0xFF, (uvc_cam.fmt.fmt.pix.pixelformat >> 8) & 0xFF, (uvc_cam.fmt.fmt.pix.pixelformat >> 16) & 0xFF, (uvc_cam.fmt.fmt.pix.pixelformat >> 24) & 0xFF);
printf("pix.height:\t\t%d\n", uvc_cam.fmt.fmt.pix.height);
printf("pix.width:\t\t%d\n", uvc_cam.fmt.fmt.pix.width);
printf("pix.field:\t\t%d\n", uvc_cam.fmt.fmt.pix.field);
}
// 4. set fps
memset(&uvc_cam.setfps, 0, sizeof(struct v4l2_streamparm));
uvc_cam.setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
uvc_cam.setfps.parm.capture.timeperframe.numerator = 1;
uvc_cam.setfps.parm.capture.timeperframe.denominator = 25;
ret = ioctl(uvc_cam.fd, VIDIOC_S_PARM, &uvc_cam.setfps);
if (ret < 0) {
printf("Unable to set frame rate\n");
return -1;
}
else {
printf("set fps OK!\n");
}
ret = ioctl(uvc_cam.fd, VIDIOC_G_PARM, &uvc_cam.setfps);
if (ret < 0) {
printf("Unable to get frame rate\n");
return -1;
}
else {
printf("get fps OK:\n");
printf("timeperframe.numerator : %d\n", uvc_cam.setfps.parm.capture.timeperframe.numerator);
printf("timeperframe.denominator: %d\n", uvc_cam.setfps.parm.capture.timeperframe.denominator);
printf("set fps : %d\n", 1 * uvc_cam.setfps.parm.capture.timeperframe.denominator / uvc_cam.setfps.parm.capture.timeperframe.numerator);
}
// 5. enum framesizes
while (1) {
struct v4l2_fmtdesc fmtdesc;
memset(&fmtdesc, 0, sizeof(struct v4l2_fmtdesc));
fmtdesc.index = i++;
fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(uvc_cam.fd, VIDIOC_ENUM_FMT, &fmtdesc) < 0) {
break;
}
printf("Supported format: %s\n", fmtdesc.description);
struct v4l2_frmsizeenum fsenum;
memset(&fsenum, 0, sizeof(struct v4l2_frmsizeenum));
fsenum.pixel_format = fmtdesc.pixelformat;
int j = 0;
while (1) {
fsenum.index = j;
j++;
if (ioctl(uvc_cam.fd, VIDIOC_ENUM_FRAMESIZES, &fsenum) == 0) {
if (uvc_cam.fmt.fmt.pix.pixelformat == fmtdesc.pixelformat) {
printf("\tSupported size with the current format: %dx%d\n", fsenum.discrete.width, fsenum.discrete.height);
}
else {
printf("\tSupported size: %dx%d\n", fsenum.discrete.width, fsenum.discrete.height);
}
}
else {
break;
}
}
}
// 6. request buffers
memset(&uvc_cam.rb, 0, sizeof(struct v4l2_requestbuffers));
uvc_cam.rb.count = NB_BUFFER;
uvc_cam.rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
uvc_cam.rb.memory = V4L2_MEMORY_MMAP;
ret = ioctl(uvc_cam.fd, VIDIOC_REQBUFS, &uvc_cam.rb);
if (ret < 0) {
printf("Unable to allocate buffers\n");
return -1;
}
// 6.1 map the buffers
for (i = 0; i < NB_BUFFER; i++) {
memset(&buf, 0, sizeof(struct v4l2_buffer));
buf.index = i;
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
ret = ioctl(uvc_cam.fd, VIDIOC_QUERYBUF, &buf);
if (ret < 0) {
printf("Unable to query buffer\n");
return -1;
}
uvc_cam.mem[i] = mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, uvc_cam.fd, buf.m.offset);
if (uvc_cam.mem[i] == MAP_FAILED) {
printf("Unable to map buffer\n");
return -1;
}
uvc_cam.memlength[i] = buf.length;
}
// 6.2 queue the buffers.
for (i = 0; i < NB_BUFFER; i++) {
memset(&buf, 0, sizeof(struct v4l2_buffer));
buf.index = i;
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
ret = ioctl(uvc_cam.fd, VIDIOC_QBUF, &buf);
if (ret < 0) {
printf("Unable to queue buffer\n");
return -1;
}
}
// 7. malloc yuyv buf
uvc_cam.framesizeIn = uvc_cam.width * uvc_cam.height << 1; // w * h * 2
uvc_cam.framebuffer = (unsigned char*)calloc(1, (size_t)uvc_cam.framesizeIn);
if (uvc_cam.framebuffer == NULL) {
printf("err calloc memory\n");
return -1;
}
printf("init %s \t[OK]\n", uvc_cam.videodevice);
return 0;
}
void v4l2_exit(void)
{
free(uvc_cam.framebuffer);
close(uvc_cam.fd);
}
int v4l2_enable(void)
{
int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
int ret;
ret = ioctl(uvc_cam.fd, VIDIOC_STREAMON, &type);
if (ret < 0) {
printf("Unable to start capture\n");
return ret;
}
printf("start capture\n");
return 0;
}
int v4l2_disable(void)
{
int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
int ret;
ret = ioctl(uvc_cam.fd, VIDIOC_STREAMOFF, &type);
if (ret < 0) {
printf("Unable to stop capture\n");
return ret;
}
printf("stop capture\n");
return 0;
}
int v4l2_uvc_grap(void)
{
int ret;
struct v4l2_buffer buf;
memset(&buf, 0, sizeof(struct v4l2_buffer));
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
ret = ioctl(uvc_cam.fd, VIDIOC_DQBUF, &buf);
if (ret < 0) {
printf("Unable to dequeue buffer\n");
exit(1);
}
memcpy(uvc_cam.framebuffer, uvc_cam.mem[buf.index], uvc_cam.framesizeIn);
ioctl(uvc_cam.fd, VIDIOC_QBUF, &buf);
printf("buf index: %d\n", buf.index);
return 0;
}
int save_bmp(char* bmp_name)
{
FILE* fp;
BITMAPFILEHEADER bf;
BITMAPINFOHEADER bi;
printf("save bmp function\n");
fp = fopen(bmp_name, "wb");
if (fp == NULL) {
printf("open errror\n");
return (-1);
}
//Set BITMAPINFOHEADER
memset(&bi, 0, sizeof(BITMAPINFOHEADER));
bi.ciSize = 40;
bi.ciWidth = IMAGEWIDTH;
bi.ciHeight = IMAGEHEIGHT;
bi.ciPlanes = 1;
bi.ciBitCount = 24;
bi.ciSizeImage = IMAGEWIDTH * IMAGEHEIGHT * 3;
//Set BITMAPFILEHEADER
memset(&bf, 0, sizeof(BITMAPFILEHEADER));
bf.cfType[0] = 'B';
bf.cfType[1] = 'M';
bf.cfSize = 54 + bi.ciSizeImage;
bf.cfReserved = 0;
bf.cfoffBits = 54;
fwrite(&bf, 14, 1, fp);
fwrite(&bi, 40, 1, fp);
fwrite(rgb888_buffer, bi.ciSizeImage, 1, fp);
printf("save %s done\n", bmp_name);
fclose(fp);
return 0;
}
int main(int argc, char const* argv[])
{
char vdname[15];
printf("\n----- v4l2 savebmp app start ----- \n");
if (argc < 2) {
printf("need:/dev/videox\n");
printf("like:%s /dev/video1\n", argv[0]);
printf("app exit.\n\n");
exit(1);
}
snprintf(vdname, sizeof(vdname), "%s", argv[1]);
memset(&uvc_cam, 0, sizeof(struct vdIn));
uvc_cam.videodevice = vdname;
printf("using: \t\t%s\n", uvc_cam.videodevice);
uvc_cam.width = _UVC_CAM_WIDTH;
uvc_cam.height = _UVC_CAM_HEIGHT;
// 1. init cam
if (v4l2_init() < 0) {
goto app_exit;
}
v4l2_enable();
usleep(5 * 1000);
// 2. grap uvc
v4l2_uvc_grap();
yuyv_to_rgb888();
// 3. save bmp
save_bmp("./uvc_grap.bmp");
app_exit:
printf("app exit.\n\n");
v4l2_exit();
return 0;
}
/*************************** (C) COPYRIGHT 2020 ZYS ************END OF FILE****/
- Run:
# ./av4l2grap-uvc-savebmp-x /dev/video1
----- v4l2 savebmp app start -----
using: /dev/video1
driver: uvcvideo
card: Aoni HD Camera
bus_info: usb-ci_hdrc.1-1.4
version: 262415
capabilities: 84200001
/dev/video1: supports capture.
/dev/video1: supports streaming.
Support format:
1.MJPEG
2.YUV 4:2:2 (YUYV)
fmt.type: 1
pix.pixelformat: YUYV
pix.height: 480
pix.width: 640
pix.field: 1
set fps OK!
get fps OK:
timeperframe.numerator : 1
timeperframe.denominator: 25
set fps : 25
Supported format: MJPEG
Supported size: 640x480
Supported size: 1280x720
Supported size: 1184x656
Supported size: 1024x576
Supported size: 960x720
Supported size: 960x540
Supported size: 864x486
Supported size: 800x600
Supported size: 752x423
Supported size: 640x360
Supported size: 320x240
Supported format: YUV 4:2:2 (YUYV)
Supported size with the current format: 640x480
Supported size with the current format: 1280x720
Supported size with the current format: 1184x656
Supported size with the current format: 1024x576
Supported size with the current format: 960x720
Supported size with the current format: 960x540
Supported size with the current format: 864x486
Supported size with the current format: 800x600
Supported size with the current format: 752x423
Supported size with the current format: 640x360
Supported size with the current format: 320x240
init /dev/video1 [OK]
start capture
buf index: 0
yuyv to rgb888 done
save bmp function
save ./uvc_grap.bmp done
app exit.
- Photo results
Shooting a screen doesn't come out well, but everything else is very sharp.
- Code correction
While testing the program today I found the colors in the captured photo were off, and traced it to one mistake; slightly embarrassing...
In the function void yuyv_to_rgb888(void):
change
gbase = 0.34414 * (u - 128) - 0.71414 * (v - 128);
to:
gbase = 0.34414 * (u - 128) + 0.71414 * (v - 128);
(the minus sign becomes a plus~)
On a V3S driving an OV2640, VIDIOC_STREAMON fails when starting capture; not sure why.