70,028
社区成员




CC = arm-unknown-linux-gnueabi-gcc
CFLAGS = -Wall -shared -g -I/home/driver_test/jpeg/include
image_get: image_get.c
$(CC) $(CFLAGS) -ljpeg -L/home/driver_test/jpeg/lib -o $@ $^
sudo cp image_get /opt/filesystem/code #/opt/filesystem/code 是在开发板的文件系统上
clean:
$(RM) image_get
.PHONY:clean
#include <stdio.h>
int main()
{
printf("hello\n");
}
也是出现段错误,于是我把makefile里面要加载的jpeg_v8b库那句删掉,结果有输出了,问题就是Makefile加载的库导致,这里头我不懂
原Makefile代码如下:
CC = arm-unknown-linux-gnueabi-gcc
CFLAGS = -Wall -shared -g -I/home/driver_test/jpeg/include
image_get: image_get.c
$(CC) $(CFLAGS) -ljpeg -L/home/driver_test/jpeg/lib -o $@ $^
sudo cp image_get /opt/filesystem/code #/opt/filesystem/code 是在开发板的文件系统上
clean:
$(RM) image_get
.PHONY:clean
[/quote]
先确认你引用库的位置是不是准确的。再确认你交叉编译libjpeg的条件是否完备#include <stdio.h>
int main()
{
printf("hello\n");
}
也是出现段错误,于是我把makefile里面要加载的jpeg_v8b库那句删掉,结果有输出了,问题就是Makefile加载的库导致,这里头我不懂
原Makefile代码如下:
CC = arm-unknown-linux-gnueabi-gcc
CFLAGS = -Wall -shared -g -I/home/driver_test/jpeg/include
image_get: image_get.c
$(CC) $(CFLAGS) -ljpeg -L/home/driver_test/jpeg/lib -o $@ $^
sudo cp image_get /opt/filesystem/code #/opt/filesystem/code 是在开发板的文件系统上
clean:
$(RM) image_get
.PHONY:clean
#include <unistd.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <stdlib.h>
#include <linux/types.h>
#include <linux/videodev2.h>
#include <malloc.h>
#include <math.h>
#include <string.h>
#include <sys/mman.h>
#include <linux/fb.h>
#include <execinfo.h>
#include <signal.h>
#include "jpeglib.h"
#include "jerror.h"
#define FILE_VIDEO "/dev/video0"
struct fb_dev
{
//for frame buffer
int fb;
void *fb_mem; //frame buffer mmap
int fb_width, fb_height, fb_line_len, fb_size;
int fb_bpp;
} fbdev;
void dump(int signo)
{
void *array[10];
size_t size;
char **strings;
size_t i;
size = backtrace(array, 10);
strings = backtrace_symbols(array, size);
printf("Obtained %zd stack frames.\n", size);
for(i=0; i<size; i++)
{
printf("%s\n", strings[i]);
}
free(strings);
exit(0);
}
unsigned short RGB888toRGB565(unsigned char red, unsigned char green, unsigned char blue)
{
unsigned short B = (blue >> 3) & 0x001F;
unsigned short G = ((green >> 2) << 5) & 0x07E0;
unsigned short R = ((red >> 3) << 11) & 0xF800;
return (unsigned short) (R | G | B);
}
//释放framebuffer的映射
int fb_munmap(void *start, size_t length)
{
return (munmap(start, length));
}
static int init_lcd()
{
char *framebuffer_ptr;
int framebuffer_fd;
static unsigned long screensize;
static struct fb_fix_screeninfo finfo;
static struct fb_var_screeninfo vinfo;
/*1.打开帧缓冲设备*/
framebuffer_fd =open("/dev/full",O_RDWR);
if(framebuffer_fd<0)
{
printf("Error: failed open framebuffer device!\n");
return -1;
}
/*2.获取固定参数*/
if(ioctl(framebuffer_fd,FBIOGET_FSCREENINFO,&finfo))
{
printf("Error:failed get the framebuffer device`s fix informations!\n");
return -1;
}
/*3.获取可变参数*/
if(ioctl(framebuffer_fd,FBIOGET_VSCREENINFO,&vinfo))
{
printf("Error:failed get the framebuffer device`s var informations!\n");
return -1;
}
screensize =(vinfo.xres *vinfo.yres*vinfo.bits_per_pixel/8);
printf("screensize =%ld\n",screensize);
fbdev.fb_width = vinfo.xres;
fbdev.fb_height = vinfo.yres;
fbdev.fb_bpp = vinfo.bits_per_pixel;
fbdev.fb_line_len = finfo.line_length;
fbdev.fb_size = finfo.smem_len;
/*5.映射*/
framebuffer_ptr=(char *)mmap( NULL, //如果此值为NULL,则表示用内核来自动给你分配一块虚拟空间
screensize, //空间大小
PROT_READ|PROT_WRITE, //权限
MAP_SHARED, //是否可以共享
framebuffer_fd, //文件描述符
0); //从哪个地方开始
if(framebuffer_ptr<0)
{
printf("Error:failed to mmap device mem!\n");
return -1;
}
memset(framebuffer_ptr,0,screensize);
printf("framebuffer_ptr is mmaped ok!\n");
return framebuffer_fd;
}
int fb_pixel(void *fbmem, int width, int height, int x, int y, unsigned short color)
{
if ((x > width) || (y > height))
return (-1);
unsigned short *dst = ((unsigned short *) fbmem + y * width + x);
*dst = color;
return 0;
}
int main()
{
signal(SIGSEGV, &dump);
int fd;
static struct v4l2_fmtdesc fmt;
static struct v4l2_capability cap;
static struct v4l2_format tv4l2_format;
static struct v4l2_requestbuffers tV4L2_reqbuf;
static struct v4l2_buffer tV4L2buf;
enum v4l2_buf_type v4l2type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
static fd_set fds ;
static struct timeval tv;
int jpgsize=25600;
unsigned char *buffer=NULL,*p;
int i, j;
//Open video device
fd = open(FILE_VIDEO, O_RDWR);
if (fd < 0)
perror(FILE_VIDEO);
//获取支持的视频格式
memset(&fmt, 0, sizeof(fmt));
fmt.index = 0;
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
while (ioctl(fd, VIDIOC_ENUM_FMT, &fmt) == 0) {
fmt.index++;
printf("pixelformat = ''%c%c%c%c''\ndescription = ''%s''\n",fmt.pixelformat & 0xFF, (fmt.pixelformat >> 8) & 0xFF,(fmt.pixelformat >> 16) & 0xFF, (fmt.pixelformat >> 24) & 0xFF,fmt.description);
}
//Get capabilities
if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
perror("VIDIOGCAP");
printf("(" FILE_VIDEO " not a video4linux device?)\n");
close(fd);
}
printf("capabilities-->%x\n",cap.capabilities);
//设置视频格式
tv4l2_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
tv4l2_format.fmt.pix.width = 480;
tv4l2_format.fmt.pix.height = 240;
tv4l2_format.fmt.pix.pixelformat = V4L2_PIX_FMT_JPEG;
tv4l2_format.fmt.pix.field = V4L2_FIELD_INTERLACED;
if (ioctl(fd, VIDIOC_S_FMT, &tv4l2_format)< 0) {
printf("VIDIOC_S_FMT\n");
close(fd);
}
//请求V4L2驱动分配视频缓冲区(申请V4L2视频驱动分配内存)
memset(&tV4L2_reqbuf, 0, sizeof(struct v4l2_requestbuffers ));
tV4L2_reqbuf.count = 1; //申请缓冲区的个数
tV4L2_reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
tV4L2_reqbuf.memory = V4L2_MEMORY_MMAP;
if (ioctl(fd, VIDIOC_REQBUFS, &tV4L2_reqbuf)< 0) {
perror("VIDIOC_REQBUFS");
close(fd);
}
//查询已经分配的V4L2的视频缓冲区的相关信息,包括视频缓冲区的使用状态、在内核空间的偏移地址、缓冲区长度等。在应用程序设计中通过调VIDIOC_QUERYBUF来获取内核空间的视频缓冲区信息,然后调用函数mmap把内核空间地址映射到用户空间,这样应用程序才能够访问位于内核空间的视频缓冲区。
memset(&tV4L2buf, 0, sizeof(struct v4l2_buffer));
tV4L2buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
tV4L2buf.memory = V4L2_MEMORY_MMAP;
tV4L2buf.index = 0; // 要获取内核视频缓冲区的信息编号
if (ioctl(fd, VIDIOC_QUERYBUF, &tV4L2buf)< 0) {
perror("VIDIOC_QUERYBUF");
close(fd);
}
// 把内核空间缓冲区映射到用户空间缓冲区
buffer = mmap(NULL,tV4L2buf.length,PROT_READ | PROT_WRITE,MAP_SHARED,fd,tV4L2buf.m.offset);
//投放一个空的视频缓冲区到视频缓冲区输入队列中
if (ioctl(fd,VIDIOC_QBUF, &tV4L2buf)< 0) {
perror("VIDIOC_QBUF");
close(fd);
}
//启动视频采集命令,应用程序调用VIDIOC_STREAMON启动视频采集命令后,视频设备驱动程序开始采集视频数据,并把采集到的视频数据保存到视频驱动的视频缓冲区中
if (ioctl(fd, VIDIOC_STREAMON, &v4l2type)< 0) {
perror("VIDIOC_STREAMON");
close(fd);
}
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; //数据流类型,永远都是V4L2_BUF_TYPE_VIDEO_CAPTURE
if (ioctl(fd, VIDIOC_G_FMT, &tv4l2_format) < 0) //读取视频源格式
{
printf("get format failed\n");
return -1;
}
else
{
printf("Picture:Width = %d Height = %d\n", tv4l2_format.fmt.pix.width, tv4l2_format.fmt.pix.height);
}
/**********************************************************************/
int lcd_fd;
char *jpgname = "image";
unsigned char *jpg_buffer = NULL;
FILE *fd_y_file = 0;
FILE *infile = 0;
static struct jpeg_decompress_struct cinfo;
static struct jpeg_error_mgr jerr;
unsigned int x;
unsigned int y;
printf("%s() %d\n", __func__, __LINE__);
/* 初始化lcd */
lcd_fd=init_lcd();
printf("frame buffer: %dx%d, %dbpp, 0x%xbyte= %d\n",
fbdev.fb_width, fbdev.fb_height, fbdev.fb_bpp, fbdev.fb_size, fbdev.fb_size);
fbdev.fb_mem = mmap (NULL, fbdev.fb_size, PROT_READ|PROT_WRITE,MAP_SHARED,lcd_fd,0);
fbdev.fb = lcd_fd;
printf("%s() %d\n", __func__, __LINE__);
while (1)
{
//如果把处理JPEG格式的数据和显示程序分离,把处理 一个新的线程,预览时会更加流畅。
FD_ZERO(&fds);
FD_SET(fd, &fds);
FD_SET(lcd_fd, &fds);
tv.tv_sec = 2; // Timeout
tv.tv_usec = 0;
select(lcd_fd+ 1, &fds, NULL, NULL, &tv);
char s[15];
sprintf(s, "%s.jpg", jpgname);
if ((fd_y_file = fopen(s, "wb")) < 0)
{
printf("Unable to create y frame recording file\n");
return -1;
}
tV4L2buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; //取得原始采集数据
tV4L2buf.memory = V4L2_MEMORY_MMAP; //存储类型:V4L2_MEMORY_MMAP(内存映射)或V4L2_MEMORY_USERPTR(用户指针)
if (ioctl(lcd_fd, VIDIOC_DQBUF, tV4L2buf) < 0)
{
perror("VIDIOC_DQBUF failed.\n");
return -1;
}
i=0;p=buffer;
while(!((*p==0xff)&&(*(p+1)==0xd9)))//计算jpg文件大小,jpg以0xffd9结尾
{
i++;
p++;
}
j=0;
while(!((*p == 0x000000FF) && (*(p+1) == 0x000000C4)))
{
j++;
p++;
if(j == i)
{
printf("huffman table don't exist! \n");
break;
}
}
jpgsize=i+2;
printf("jpgsiez:%d\n",jpgsize);
fwrite(buffer, jpgsize, 1, fd_y_file); //开始向LCD发送数据显示采集到的图像
fclose(fd_y_file);
if ((infile = fopen(s, "rb")) == NULL)
{
fprintf(stderr, "open %s failed\n", s);
exit(-1);
}
cinfo.err = jpeg_std_error(&jerr);
jpeg_create_decompress(&cinfo);
//导入要解压的Jpeg文件infile
jpeg_stdio_src(&cinfo, infile);
//读取jpeg文件的文件头
jpeg_read_header(&cinfo, TRUE);
//开始解压Jpeg文件,解压后将分配给scanline缓冲区,
jpeg_start_decompress(&cinfo);
jpg_buffer = (unsigned char *) malloc(cinfo.output_width
* cinfo.output_components);
y = 0;
while (cinfo.output_scanline < cinfo.output_height)
{
jpeg_read_scanlines(&cinfo, &jpg_buffer, 1);
if (fbdev.fb_bpp == 16)
{
unsigned short color;
for (x = 0; x < cinfo.output_width; x++)
{
color = RGB888toRGB565(jpg_buffer[x * 3],
jpg_buffer[x * 3 + 1], jpg_buffer[x * 3 + 2]);
fb_pixel(fbdev.fb_mem, fbdev.fb_width, fbdev.fb_height, x, y, color);
}
}
else if (fbdev.fb_bpp == 24)
{
memcpy((unsigned char *)fbdev.fb_mem + y * fbdev.fb_width * 3, buffer,
cinfo.output_width * cinfo.output_components);
}
y++; //下一个scanline
}
//完成Jpeg解码,释放Jpeg文件
jpeg_finish_decompress(&cinfo);
jpeg_destroy_decompress(&cinfo);
//释放帧缓冲区
free(buffer);
//关闭Jpeg输入文件
fclose(infile);
//获取下一帧视频数据
if (ioctl(fd, VIDIOC_QBUF, &tV4L2buf) < 0)
{
printf("VIDIOC_QBUF error\n");
return -1;
}
}
fb_munmap(fbdev.fb_mem, fbdev.fb_size); //释放framebuffer映射
close(lcd_fd); //关闭Framebuffer设备
close(fd);
return 0;
}
#include <unistd.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <stdlib.h>
#include <linux/types.h>
#include <linux/videodev2.h>
#include <malloc.h>
#include <math.h>
#include <string.h>
#include <sys/mman.h>
#include <linux/fb.h>
#include <execinfo.h>
#include <signal.h>
#include "jpeglib.h"
#include "jerror.h"
#define FILE_VIDEO "/dev/video0"
struct fb_dev
{
//for frame buffer
int fb;
void *fb_mem; //frame buffer mmap
int fb_width, fb_height, fb_line_len, fb_size;
int fb_bpp;
} fbdev;
void dump(int signo)
{
void *array[10];
size_t size;
char **strings;
size_t i;
size = backtrace(array, 10);
strings = backtrace_symbols(array, size);
printf("Obtained %zd stack frames.\n", size);
for(i=0; i<size; i++)
{
printf("%s\n", strings[i]);
}
free(strings);
exit(0);
}
unsigned short RGB888toRGB565(unsigned char red, unsigned char green, unsigned char blue)
{
unsigned short B = (blue >> 3) & 0x001F;
unsigned short G = ((green >> 2) << 5) & 0x07E0;
unsigned short R = ((red >> 3) << 11) & 0xF800;
return (unsigned short) (R | G | B);
}
//释放framebuffer的映射
int fb_munmap(void *start, size_t length)
{
return (munmap(start, length));
}
static int init_lcd()
{
char *framebuffer_ptr;
int framebuffer_fd;
static unsigned long screensize;
struct fb_fix_screeninfo finfo;
struct fb_var_screeninfo vinfo;
/*1.打开帧缓冲设备*/
framebuffer_fd =open("/dev/full",O_RDWR);
if(framebuffer_fd<0)
{
printf("Error: failed open framebuffer device!\n");
return -1;
}
/*2.获取固定参数*/
if(ioctl(framebuffer_fd,FBIOGET_FSCREENINFO,&finfo))
{
printf("Error:failed get the framebuffer device`s fix informations!\n");
return -1;
}
/*3.获取可变参数*/
if(ioctl(framebuffer_fd,FBIOGET_VSCREENINFO,&vinfo))
{
printf("Error:failed get the framebuffer device`s var informations!\n");
return -1;
}
screensize =(vinfo.xres *vinfo.yres*vinfo.bits_per_pixel/8);
printf("screensize =%ld\n",screensize);
fbdev.fb_width = vinfo.xres;
fbdev.fb_height = vinfo.yres;
fbdev.fb_bpp = vinfo.bits_per_pixel;
fbdev.fb_line_len = finfo.line_length;
fbdev.fb_size = finfo.smem_len;
/*5.映射*/
framebuffer_ptr=(char *)mmap( NULL, //如果此值为NULL,则表示用内核来自动给你分配一块虚拟空间
screensize, //空间大小
PROT_READ|PROT_WRITE, //权限
MAP_SHARED, //是否可以共享
framebuffer_fd, //文件描述符
0); //从哪个地方开始
if(framebuffer_ptr<0)
{
printf("Error:failed to mmap device mem!\n");
return -1;
}
memset(framebuffer_ptr,0,screensize);
printf("framebuffer_ptr is mmaped ok!\n");
return framebuffer_fd;
}
int fb_pixel(void *fbmem, int width, int height, int x, int y, unsigned short color)
{
if ((x > width) || (y > height))
return (-1);
unsigned short *dst = ((unsigned short *) fbmem + y * width + x);
*dst = color;
return 0;
}
int main()
{
signal(SIGSEGV, &dump);
int fd;
struct v4l2_fmtdesc fmt;
struct v4l2_capability cap;
struct v4l2_format tv4l2_format;
struct v4l2_requestbuffers tV4L2_reqbuf;
struct v4l2_buffer tV4L2buf;
enum v4l2_buf_type v4l2type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fd_set fds ;
struct timeval tv;
int jpgsize=25600;
unsigned char *buffer=NULL,*p;
int i, j;
//Open video device
fd = open(FILE_VIDEO, O_RDWR);
if (fd < 0)
perror(FILE_VIDEO);
//获取支持的视频格式
memset(&fmt, 0, sizeof(fmt));
fmt.index = 0;
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
while (ioctl(fd, VIDIOC_ENUM_FMT, &fmt) == 0) {
fmt.index++;
printf("pixelformat = ''%c%c%c%c''\ndescription = ''%s''\n",fmt.pixelformat & 0xFF, (fmt.pixelformat >> 8) & 0xFF,(fmt.pixelformat >> 16) & 0xFF, (fmt.pixelformat >> 24) & 0xFF,fmt.description);
}
//Get capabilities
if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
perror("VIDIOGCAP");
printf("(" FILE_VIDEO " not a video4linux device?)\n");
close(fd);
}
printf("capabilities-->%x\n",cap.capabilities);
//设置视频格式
tv4l2_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
tv4l2_format.fmt.pix.width = 480;
tv4l2_format.fmt.pix.height = 240;
tv4l2_format.fmt.pix.pixelformat = V4L2_PIX_FMT_JPEG;
tv4l2_format.fmt.pix.field = V4L2_FIELD_INTERLACED;
if (ioctl(fd, VIDIOC_S_FMT, &tv4l2_format)< 0) {
printf("VIDIOC_S_FMT\n");
close(fd);
}
//请求V4L2驱动分配视频缓冲区(申请V4L2视频驱动分配内存)
memset(&tV4L2_reqbuf, 0, sizeof(struct v4l2_requestbuffers ));
tV4L2_reqbuf.count = 1; //申请缓冲区的个数
tV4L2_reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
tV4L2_reqbuf.memory = V4L2_MEMORY_MMAP;
if (ioctl(fd, VIDIOC_REQBUFS, &tV4L2_reqbuf)< 0) {
perror("VIDIOC_REQBUFS");
close(fd);
}
//查询已经分配的V4L2的视频缓冲区的相关信息,包括视频缓冲区的使用状态、在内核空间的偏移地址、缓冲区长度等。在应用程序设计中通过调VIDIOC_QUERYBUF来获取内核空间的视频缓冲区信息,然后调用函数mmap把内核空间地址映射到用户空间,这样应用程序才能够访问位于内核空间的视频缓冲区。
memset(&tV4L2buf, 0, sizeof(struct v4l2_buffer));
tV4L2buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
tV4L2buf.memory = V4L2_MEMORY_MMAP;
tV4L2buf.index = 0; // 要获取内核视频缓冲区的信息编号
if (ioctl(fd, VIDIOC_QUERYBUF, &tV4L2buf)< 0) {
perror("VIDIOC_QUERYBUF");
close(fd);
}
// 把内核空间缓冲区映射到用户空间缓冲区
buffer = mmap(NULL,tV4L2buf.length,PROT_READ | PROT_WRITE,MAP_SHARED,fd,tV4L2buf.m.offset);
//投放一个空的视频缓冲区到视频缓冲区输入队列中
if (ioctl(fd,VIDIOC_QBUF, &tV4L2buf)< 0) {
perror("VIDIOC_QBUF");
close(fd);
}
//启动视频采集命令,应用程序调用VIDIOC_STREAMON启动视频采集命令后,视频设备驱动程序开始采集视频数据,并把采集到的视频数据保存到视频驱动的视频缓冲区中
if (ioctl(fd, VIDIOC_STREAMON, &v4l2type)< 0) {
perror("VIDIOC_STREAMON");
close(fd);
}
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; //数据流类型,永远都是V4L2_BUF_TYPE_VIDEO_CAPTURE
if (ioctl(fd, VIDIOC_G_FMT, &tv4l2_format) < 0) //读取视频源格式
{
printf("get format failed\n");
return -1;
}
else
{
printf("Picture:Width = %d Height = %d\n", tv4l2_format.fmt.pix.width, tv4l2_format.fmt.pix.height);
}
/**********************************************************************/
int lcd_fd;
char *jpgname = "image";
unsigned char *jpg_buffer = NULL;
FILE *fd_y_file = 0;
FILE *infile = 0;
struct jpeg_decompress_struct cinfo;
struct jpeg_error_mgr jerr;
unsigned int x;
unsigned int y;
printf("%s() %d\n", __func__, __LINE__);
/* 初始化lcd */
lcd_fd=init_lcd();
printf("frame buffer: %dx%d, %dbpp, 0x%xbyte= %d\n",
fbdev.fb_width, fbdev.fb_height, fbdev.fb_bpp, fbdev.fb_size, fbdev.fb_size);
fbdev.fb_mem = mmap (NULL, fbdev.fb_size, PROT_READ|PROT_WRITE,MAP_SHARED,lcd_fd,0);
fbdev.fb = lcd_fd;
printf("%s() %d\n", __func__, __LINE__);
while (1)
{
//如果把处理JPEG格式的数据和显示程序分离,把处理 一个新的线程,预览时会更加流畅。
FD_ZERO(&fds);
FD_SET(fd, &fds);
FD_SET(lcd_fd, &fds);
tv.tv_sec = 2; // Timeout
tv.tv_usec = 0;
select(lcd_fd+ 1, &fds, NULL, NULL, &tv);
char s[15];
sprintf(s, "%s.jpg", jpgname);
if ((fd_y_file = fopen(s, "wb")) < 0)
{
printf("Unable to create y frame recording file\n");
return -1;
}
tV4L2buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; //取得原始采集数据
tV4L2buf.memory = V4L2_MEMORY_MMAP; //存储类型:V4L2_MEMORY_MMAP(内存映射)或V4L2_MEMORY_USERPTR(用户指针)
if (ioctl(lcd_fd, VIDIOC_DQBUF, tV4L2buf) < 0)
{
perror("VIDIOC_DQBUF failed.\n");
return -1;
}
i=0;p=buffer;
while(!((*p==0xff)&&(*(p+1)==0xd9)))//计算jpg文件大小,jpg以0xffd9结尾
{
i++;
p++;
}
j=0;
while(!((*p == 0x000000FF) && (*(p+1) == 0x000000C4)))
{
j++;
p++;
if(j == i)
{
printf("huffman table don't exist! \n");
break;
}
}
jpgsize=i+2;
printf("jpgsiez:%d\n",jpgsize);
fwrite(buffer, jpgsize, 1, fd_y_file); //开始向LCD发送数据显示采集到的图像
fclose(fd_y_file);
if ((infile = fopen(s, "rb")) == NULL)
{
fprintf(stderr, "open %s failed\n", s);
exit(-1);
}
cinfo.err = jpeg_std_error(&jerr);
jpeg_create_decompress(&cinfo);
//导入要解压的Jpeg文件infile
jpeg_stdio_src(&cinfo, infile);
//读取jpeg文件的文件头
jpeg_read_header(&cinfo, TRUE);
//开始解压Jpeg文件,解压后将分配给scanline缓冲区,
jpeg_start_decompress(&cinfo);
jpg_buffer = (unsigned char *) malloc(cinfo.output_width
* cinfo.output_components);
y = 0;
while (cinfo.output_scanline < cinfo.output_height)
{
jpeg_read_scanlines(&cinfo, &jpg_buffer, 1);
if (fbdev.fb_bpp == 16)
{
unsigned short color;
for (x = 0; x < cinfo.output_width; x++)
{
color = RGB888toRGB565(jpg_buffer[x * 3],
jpg_buffer[x * 3 + 1], jpg_buffer[x * 3 + 2]);
fb_pixel(fbdev.fb_mem, fbdev.fb_width, fbdev.fb_height, x, y, color);
}
}
else if (fbdev.fb_bpp == 24)
{
memcpy((unsigned char *)fbdev.fb_mem + y * fbdev.fb_width * 3, buffer,
cinfo.output_width * cinfo.output_components);
}
y++; //下一个scanline
}
//完成Jpeg解码,释放Jpeg文件
jpeg_finish_decompress(&cinfo);
jpeg_destroy_decompress(&cinfo);
//释放帧缓冲区
free(buffer);
//关闭Jpeg输入文件
fclose(infile);
//获取下一帧视频数据
if (ioctl(fd, VIDIOC_QBUF, &tV4L2buf) < 0)
{
printf("VIDIOC_QBUF error\n");
return -1;
}
}
fb_munmap(fbdev.fb_mem, fbdev.fb_size); //释放framebuffer映射
close(lcd_fd); //关闭Framebuffer设备
close(fd);
return 0;
}
CC = arm-unknown-linux-gnueabi-gcc
CFLAGS = -Wall -g
image_get: image_get.c
$(CC) $(CFLAGS) -o $@ $^
sudo cp image_get /opt/filesystem/code #/opt/filesystem/code 是在开发板的文件系统上
clean:
$(RM) image_get
.PHONY:clean