Embedded Linux video server
/* Encode the video data with H.264 and transmit it over the network in real time (UDP) */
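/*
 * Overall flow (as implemented below):
 *   init_video()   - open /dev/video0 (V4L1), set the capture window and picture
 *                    parameters, then mmap the driver's frame buffer
 *   init_encoder() - read the T264 encoder parameters from fastspeed.txt and open
 *                    the encoder
 *   udps_respon()  - loop: capture one RGB24 frame, convert it to YUV, H.264-encode
 *                    it, prepend a 1-byte frame number and send the result by UDP
 */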
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <strings.h>
#include <stdint.h>
#include <errno.h>
#include <unistd.h>
#include <fcntl.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <linux/videodev.h>
#include "convert.h"
#include "./avc-src-0.14/avc/common/T264.h"
#define SERVER_PORT 8888
//#define MAX_MSG_SIZE 176*144*3
#define MAX_MSG_SIZE 320*240*3
#define WB
T264_t* m_t264;
T264_param_t m_param;
char* m_pSrc;
char* m_pDst;
int m_lDstSize;
char* m_pPoolData;
//WB
#define USB_VIDEO "/dev/video0"
int cam_fd;
const int bpp = 24;
struct video_mmap cam_mm; /* per-frame capture request (memory-mapped capture) */
/* basic information about the camera, e.g. device name,
   maximum/minimum supported resolution, signal source, etc. */
struct video_capability cam_cap;
/* brightness, contrast, etc., plus the resolution used together with video_mmap */
struct video_picture cam_pic;
struct video_mbuf cam_mbuf; /* frame information of the camera's capture buffer */
struct video_window win; /* capture-window parameters of the device */
char *cam_data = NULL;
int nframe;
static unsigned char cam_yuv[2457616];
void read_video(char *pixels,int w, int h)
{
int ret;
int frame=0;
cam_mm.width = w;
cam_mm.height = h;
/* for single-frame capture it is enough to set frame = 0 */
cam_mm.frame = 0;
cam_mm.format=VIDEO_PALETTE_RGB24;
/* on success this activates the device and starts capturing one frame; the call is non-blocking */
ret = ioctl(cam_fd,VIDIOCMCAPTURE,&cam_mm);
if( ret<0 ) {
printf("ERROR: VIDIOCMCAPTURE\n");
}
/* wait for the frame to finish; a successful return means the capture is complete */
ret = ioctl(cam_fd,VIDIOCSYNC,&frame);
if( ret<0 ) {
printf("ERROR: VIDIOCSYNC\n");
}
}
void config_vid_pic()
{
char cfpath[100];
FILE *cf = NULL; /* no parameter file is opened here, so the default settings below are used */
int ret;
if (ioctl(cam_fd, VIDIOCGPICT, &cam_pic) < 0) {
printf("ERROR:VIDIOCGPICT
");
}
cam_pic.palette = VIDEO_PALETTE_RGB24;
if( cf==NULL )
{
cam_pic.brightness = 30464;
cam_pic.hue = 111;
cam_pic.colour = 555;
cam_pic.contrast = 43312;
cam_pic.whiteness = 111;
cam_pic.depth = 24;
ret = ioctl( cam_fd, VIDIOCSPICT,&cam_pic ); /* write the video_picture settings back to the camera */
if( ret<0 ) {
close(cam_fd);
printf("ERROR: VIDIOCSPICT,Can't set video_picture format\n");
}
return;
}
// fscanf(cf,"%d",&cam_pic.brightness);
// fscanf(cf,"%d",&cam_pic.hue);
// fscanf(cf,"%d",&cam_pic.colour);
// fscanf(cf,"%d",&cam_pic.contrast);
// fscanf(cf,"%d",&cam_pic.whiteness);
// fclose( cf );
// ret = ioctl( cam_fd, VIDIOCSPICT,&cam_pic ); /*设置摄像头缓冲中voideo_picture信息*/
// if( ret<0 ) {
// close(cam_fd);
// errexit("ERROR: VIDIOCSPICT,Can't set video_picture format
");
// }
}
void init_video(int w,int h,int bpp) /* bpp == bits per pixel */
{
int ret;
cam_fd = open( USB_VIDEO, O_RDWR );
if( cam_fd<0 )
printf("Can't open video device
");
ret = ioctl( cam_fd,VIDIOCGCAP,&cam_cap ); /* 摄像头的基本信息*/
if( ret<0 ) {
printf("Can't get device information: VIDIOCGCAP
");
}
printf("Device name:%s
Width:%d ~ %d
Height:%d ~ %d
",cam_cap.name, cam_cap.maxwidth, cam_cap.minwidth, cam_cap.maxheight, cam_cap.minheight);
if( ioctl(cam_fd,VIDIOCGWIN,&win)<0 ) {
printf("ERROR:VIDIOCGWIN
");
}
win.x = 0;
win.y = 0;
win.width=w;
win.height=h;
if (ioctl(cam_fd, VIDIOCSWIN, &win) < 0) {
printf("ERROR:VIDIOCSWIN
");
}
config_vid_pic();
ret = ioctl(cam_fd,VIDIOCGMBUF,&cam_mbuf); /* query frame information of the camera's capture buffer */
if( ret<0 ) {
printf("ERROR:VIDIOCGMBUF,Can't get video_mbuf\n");
}
printf("Frames:%d\n",cam_mbuf.frames);
nframe = cam_mbuf.frames;
/* then map the camera device file into this process's address space */
cam_data = (char*)mmap(0, cam_mbuf.size, PROT_READ|PROT_WRITE,MAP_SHARED,cam_fd,0); // share the captured data via mmap
if( cam_data == MAP_FAILED ) {
printf("ERROR:mmap\n");
}
printf("Buffer size:%d\nOffset:%d\n",cam_mbuf.size,cam_mbuf.offsets[0]);
InitLookupTable();
}
void init_param(T264_param_t* param, const char* file)
{
int total_no;
FILE* fd;
char line[255];
int32_t b;
if (!(fd = fopen(file,"r")))
{
printf("Couldn't open parameter file %s.
", file);
exit(-1);
}
memset(param, 0, sizeof(*param));
fgets(line, 254, fd); sscanf(line,"%d", &b);
if (b != 4)
{
printf("wrong param file version, expect v4.0
");
exit(-1);
}
fgets(line, 254, fd); sscanf(line,"%d", &param->width);
fgets(line, 254, fd); sscanf(line,"%d", &param->height);
fgets(line, 254, fd); sscanf(line,"%d", &param->search_x);
fgets(line, 254, fd); sscanf(line,"%d", &param->search_y);
fgets(line, 254, fd); sscanf(line,"%d", &total_no);
fgets(line, 254, fd); sscanf(line,"%d", &param->iframe);
fgets(line, 254, fd); sscanf(line,"%d", &param->idrframe);
fgets(line, 254, fd); sscanf(line,"%d", &param->b_num);
fgets(line, 254, fd); sscanf(line,"%d", &param->ref_num);
fgets(line, 254, fd); sscanf(line,"%d", &param->enable_rc);
fgets(line, 254, fd); sscanf(line,"%d", &param->bitrate);
fgets(line, 254, fd); sscanf(line,"%f", &param->framerate);
fgets(line, 254, fd); sscanf(line,"%d", &param->qp);
fgets(line, 254, fd); sscanf(line,"%d", &param->min_qp);
fgets(line, 254, fd); sscanf(line,"%d", &param->max_qp);
fgets(line, 254, fd); sscanf(line,"%d", &param->enable_stat);
fgets(line, 254, fd); sscanf(line,"%d", &param->disable_filter);
fgets(line, 254, fd); sscanf(line,"%d", &param->aspect_ratio);
fgets(line, 254, fd); sscanf(line,"%d", &param->video_format);
fgets(line, 254, fd); sscanf(line,"%d", &param->luma_coeff_cost);
fgets(line, 254, fd); sscanf(line,"%d", &b);
param->flags |= (USE_INTRA16x16) * (!!b);
fgets(line, 254, fd); sscanf(line,"%d", &b);
param->flags |= (USE_INTRA4x4) * (!!b);
fgets(line, 254, fd); sscanf(line,"%d", &b);
param->flags |= (USE_INTRAININTER) * (!!b);
fgets(line, 254, fd); sscanf(line,"%d", &b);
param->flags |= (USE_HALFPEL) * (!!b);
fgets(line, 254, fd); sscanf(line,"%d", &b);
param->flags |= (USE_QUARTPEL) * (!!b);
fgets(line, 254, fd); sscanf(line,"%d", &b);
param->flags |= (USE_SUBBLOCK) * (!!b);
fgets(line, 254, fd); sscanf(line,"%d", &b);
param->flags |= (USE_FULLSEARCH) * (!!b);
fgets(line, 254, fd); sscanf(line,"%d", &b);
param->flags |= (USE_DIAMONDSEACH) * (!!b);
fgets(line, 254, fd); sscanf(line,"%d", &b);
param->flags |= (USE_FORCEBLOCKSIZE) * (!!b);
fgets(line, 254, fd); sscanf(line,"%d", &b);
param->flags |= (USE_FASTINTERPOLATE) * (!!b);
fgets(line, 254, fd); sscanf(line,"%d", &b);
param->flags |= (USE_SAD) * b;
fgets(line, 254, fd); sscanf(line,"%d", &b);
param->flags |= (USE_EXTRASUBPELSEARCH) * (!!b);
fgets(line, 254, fd); sscanf(line,"%d", &b);
param->flags |= (USE_SCENEDETECT) * (!!b);
fgets(line, 254, fd); sscanf(line,"%d", &b);
param->block_size |= (SEARCH_16x16P) * (!!b);
fgets(line, 254, fd); sscanf(line,"%d", &b);
param->block_size |= (SEARCH_16x8P) * (!!b);
fgets(line, 254, fd); sscanf(line,"%d", &b);
param->block_size |= (SEARCH_8x16P) * (!!b);
fgets(line, 254, fd); sscanf(line,"%d", &b);
param->block_size |= (SEARCH_8x8P) * (!!b);
fgets(line, 254, fd); sscanf(line,"%d", &b);
param->block_size |= (SEARCH_8x4P) * (!!b);
fgets(line, 254, fd); sscanf(line,"%d", &b);
param->block_size |= (SEARCH_4x8P) * (!!b);
fgets(line, 254, fd); sscanf(line,"%d", &b);
param->block_size |= (SEARCH_4x4P) * (!!b);
fgets(line, 254, fd); sscanf(line,"%d", &b);
param->block_size |= (SEARCH_16x16B) * (!!b);
fgets(line, 254, fd); sscanf(line,"%d", &b);
param->block_size |= (SEARCH_16x8B) * (!!b);
fgets(line, 254, fd); sscanf(line,"%d", &b);
param->block_size |= (SEARCH_8x16B) * (!!b);
fgets(line, 254, fd); sscanf(line,"%d", &b);
param->block_size |= (SEARCH_8x8B) * (!!b);
fgets(line, 254, fd); sscanf(line,"%d", &param->cpu);
fgets(line, 254, fd); sscanf(line, "%d", &param->cabac);
// fgets(line, 254, fd); sscanf(line,"%s", src_path);
// fgets(line, 254, fd); sscanf(line,"%s", out_path);
// fgets(line, 254, fd); sscanf(line,"%s", rec_path);
// param->rec_name = rec_path;
fclose(fd);
}
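/* For reference, fastspeed.txt is read one value per line in exactly the order
   parsed above: version (must be 4), width, height, search_x, search_y, total
   frame number, iframe, idrframe, b_num, ref_num, enable_rc, bitrate, framerate,
   qp, min_qp, max_qp, enable_stat, disable_filter, aspect_ratio, video_format,
   luma_coeff_cost, then the USE_* flag switches, the SEARCH_* block-size
   switches, and finally cpu and cabac. */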
void init_encoder()
{
// prepare for encoding
const char* paramfile = "fastspeed.txt";
init_param(&m_param, paramfile);
m_param.direct_flag = 1;
m_t264 = T264_open(&m_param);
m_lDstSize = m_param.height * m_param.width + (m_param.height * m_param.width >> 1);
// m_pSrc = (uint8_t*)T264_malloc(m_lDstSize, CACHE_SIZE);
m_pDst = (uint8_t*)T264_malloc(m_lDstSize, CACHE_SIZE);
m_pPoolData = malloc(m_param.width*m_param.height*3/2);
}
void udps_respon(int sockfd,int w,int h)
{
struct sockaddr_in addrsrc;
struct sockaddr_in addrdst;
int addrlen,n;
int32_t iActualLen;
int row_stride = w*3*h/2;
bzero(&addrdst,sizeof(struct sockaddr_in));
addrdst.sin_family=AF_INET;
addrdst.sin_addr.s_addr=inet_addr("192.168.8.133");
addrdst.sin_port=htons(SERVER_PORT);
while(1)
{
read_video(NULL,w,h);
ConvertRGB2YUV(w,h,cam_data,cam_yuv); // convert the RGB colour space to YUV
iActualLen = T264_encode(m_t264, cam_yuv, m_pDst, row_stride); // H.264 encoding
printf("encoded:%d, %d bytes.\n",row_stride,iActualLen);
memcpy(m_pPoolData,&m_t264->frame_num,1); // prepend a 1-byte frame number
memcpy(m_pPoolData+1, m_pDst, iActualLen);
iActualLen++; // account for the extra frame-number byte
sendto(sockfd,m_pPoolData,iActualLen,0,(struct sockaddr*)&addrdst,sizeof(struct sockaddr_in)); // send the packet to the client
}
}
void free_dev()
{
printf("free device
");
close(cam_fd);
}
int main(void)
{
int sockfd;
struct sockaddr_in addr;
printf("start 2.0...
");
sockfd=socket(AF_INET,SOCK_DGRAM,0);//建立基于UDP的SOCKET
if(sockfd<0)
{
printf("0-");
printf("Socket Error
");
exit(1);
}
bzero(&addr,sizeof(struct sockaddr_in)); //初始化addr结构
addr.sin_family=AF_INET;
addr.sin_addr.s_addr=htonl(INADDR_ANY); //网络字节序转换
addr.sin_port=htons(SERVER_PORT);
if(bind(sockfd,(struct sockaddr *)&addr,sizeof(struct sockaddr_in))<0 )
{
printf(stderr,"Bind Error:%s
",strerror(errno));
exit(1);
}
init_encoder();
#ifdef WB
atexit( &free_dev );
init_video(m_param.width,m_param.height,bpp);
#endif
udps_respon(sockfd,m_param.width,m_param.height);
close(sockfd);
}
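For reference, a minimal matching UDP client could look like the sketch below. It assumes the same SERVER_PORT (8888) and the packet layout produced by udps_respon() above: a 1-byte frame number followed by the H.264 bitstream of one frame. It only prints what it receives; a real client would feed the payload into an H.264 decoder.
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>

#define SERVER_PORT 8888
#define MAX_MSG_SIZE (320*240*3)

int main(void)
{
    static unsigned char buf[MAX_MSG_SIZE];
    struct sockaddr_in addr;
    int sockfd = socket(AF_INET, SOCK_DGRAM, 0); /* UDP socket */
    if (sockfd < 0) {
        perror("socket");
        exit(1);
    }
    memset(&addr, 0, sizeof(addr));
    addr.sin_family = AF_INET;
    addr.sin_addr.s_addr = htonl(INADDR_ANY);
    addr.sin_port = htons(SERVER_PORT);
    if (bind(sockfd, (struct sockaddr *)&addr, sizeof(addr)) < 0) {
        perror("bind");
        exit(1);
    }
    for (;;) {
        ssize_t n = recvfrom(sockfd, buf, sizeof(buf), 0, NULL, NULL);
        if (n <= 1)
            continue;
        /* buf[0] is the frame number prepended by the server,
           buf+1 .. buf+n-1 is the H.264 bitstream of that frame */
        printf("frame %u: %ld bytes of H.264 data\n", (unsigned)buf[0], (long)(n - 1));
        /* a real client would hand buf+1 to an H.264 decoder here */
    }
}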