本文内容均為原創,轉載請注明出處。
使用ffmpeg解碼h264資料其實相對使用x264進行視訊編碼是簡單了許多的,因為ffmpeg提供了一個decoding_encoding.c的檔案,這個檔案裡面有簡單的使用ffmpeg進行視訊、音頻編解碼的例子,不過可能有的人也會找不到這個示例,我就講我改造過的這個示例放在這裡,同時加一些解釋。
其中需要注意的一點我需要在此說明,就是ffmpeg在進行解碼的時候是會考慮要解碼的資料包是否有0x00 00 00 01(或0x00 00 01)這樣的起始碼頭的,如果沒有的話,ffmpeg會認為是錯誤的資料包。下面是使用opencv對解碼後的圖像進行顯示,所以還要配置opencv的環境,如果沒有的話,可以注釋掉ShowImage這個函數,然後使用pgm_save這個函數將解碼後的圖像儲存。
下面將我的代碼放出,同樣,過程參見代碼注釋,相對來說比較簡單,不在此過多敘述:
/*
 * Write one 8-bit grayscale plane as a binary PGM (P5) file.
 *
 * buf      - top-left pixel of the plane
 * wrap     - stride (bytes per row in memory, may exceed xsize)
 * xsize    - image width in pixels
 * ysize    - image height in pixels
 * filename - output path, opened in binary mode
 */
static void pgm_save(unsigned char *buf, int wrap, int xsize, int ysize, char *filename)
{
    FILE *f;
    int i;

    f = fopen(filename, "wb");
    if (!f) {
        /* original code dereferenced a NULL stream on open failure */
        fprintf(stderr, "pgm_save: could not open %s\n", filename);
        return;
    }
    /* P5 header: width and height must be separated by whitespace
     * (the original "%d%d" fused them into one number, producing an
     * invalid PGM file). */
    fprintf(f, "P5\n%d %d\n%d\n", xsize, ysize, 255);
    /* copy row by row so the stride padding is not written out */
    for (i = 0; i < ysize; i++)
        fwrite(buf + i * wrap, 1, xsize, f);
    fclose(f);
}
/*
 * Find the offset of the next Annex-B start code (0x000001 or 0x00000001)
 * in buffer[0..len).  The search deliberately starts at offset 512 so the
 * start code of the packet currently at the head of the buffer is skipped
 * (assumes a NAL unit is at least 512 bytes — TODO confirm for small NALs).
 *
 * Returns the offset of the next start code, or 0 when no start code is
 * found (or one sits exactly at offset 512), signalling the caller to
 * flush the remaining bytes as a single packet.
 */
static int _find_head(unsigned char *buffer, int len)
{
    int i;

    /* stop at len-4 so buffer[i+3] stays in bounds; the original loop ran
     * to len-1 and read up to 3 bytes past the end of the buffer */
    for (i = 512; i + 3 < len; i++) {
        if (buffer[i] == 0 && buffer[i + 1] == 0 && buffer[i + 2] == 0 && buffer[i + 3] == 1)
            break;
        if (buffer[i] == 0 && buffer[i + 1] == 0 && buffer[i + 2] == 1)
            break;
    }
    if (i + 3 >= len)
        return 0;
    if (i == 512)
        return 0;
    return i;
}
/*
 * Slice the next NAL unit out of the file into an AVPacket so ffmpeg can
 * decode it.  Maintains a static 1 MiB sliding window over the file:
 * avpkt->data points INTO that static buffer, so the packet is only valid
 * until the next call (the next call may memmove the buffer contents).
 * When no further start code is found, the whole remaining tail is emitted
 * as one packet; avpkt->size becomes 0 once the file is exhausted.
 */
#define FILE_READING_BUFFER (1*1024*1024)
static void build_avpkt(AVPacket *avpkt, FILE *fp)
{
    static unsigned char buffer[FILE_READING_BUFFER];
    static int readptr = 0;   /* start of unconsumed data */
    static int writeptr = 0;  /* end of valid data */
    int len, toread;
    int nexthead;

    /* refill once fewer than 200 KiB remain buffered: slide the unread
     * tail to the front, then top the buffer up from the file */
    if (writeptr - readptr < 200 * 1024) {
        memmove(buffer, &buffer[readptr], writeptr - readptr);
        writeptr -= readptr;
        readptr = 0;
        toread = FILE_READING_BUFFER - writeptr;
        len = fread(&buffer[writeptr], 1, toread, fp);
        writeptr += len;
    }

    nexthead = _find_head(&buffer[readptr], writeptr - readptr);
    if (nexthead == 0) {
        /* no further start code: flush everything that is left */
        printf("failed find next head...\n");
        nexthead = writeptr - readptr;
    }
    avpkt->size = nexthead;
    avpkt->data = &buffer[readptr];
    readptr += nexthead;
}
static voidvideo_decode_example(const char *outfilename, constchar *filename)
{
AVCodec *codec;
AVCodecContext *c= NULL;
int frame,got_picture, len;
FILE *f, *fout;
AVFrame *picture;
uint8_t inbuf[INBUF_SIZE +FF_INPUT_BUFFER_PADDING_SIZE];
charbuf[1024];
AVPacket avpkt;
av_init_packet(&avpkt);
/* set end ofbuffer to 0 (this ensures that no overreading happens for damaged mpeg streams)*/
memset(inbuf + INBUF_SIZE, 0,FF_INPUT_BUFFER_PADDING_SIZE);
printf("Videodecoding\n");
opts = NULL;
//av_dict_set(&opts,"b", "2.5M", 0);
/* find the h264video decoder */
codec = avcodec_find_decoder(CODEC_ID_H264);
if (!codec){
fprintf(stderr, "codecnot found\n");
return ;
}
c = avcodec_alloc_context3(codec);
picture= avcodec_alloc_frame();
if(codec->capabilities&CODEC_CAP_TRUNCATED)
c->flags|= CODEC_FLAG_TRUNCATED; /* we do not send complete frames */
/* For somecodecs, such as msmpeg4 and mpeg4, width and height
MUST be initialized there because thisinformation is not
available in the bitstream. */
/* open it */
if(avcodec_open2(c, codec, NULL) < 0) {
fprintf(stderr, "couldnot open codec\n");
exit(1);
}
// fout=fopen(outfilename,"wb");
/* the codec givesus the frame size, in samples */
f = fopen(filename, "rb");
if (!f) {
fprintf(stderr, "couldnot open %s\n", filename);
exit(1);
}
//解碼與顯示需要的輔助的資料結構,需要注意的是,AVFrame必須經過alloc才能使用,不然其記憶體的緩存空間指針是空的,程式會崩潰
AVFrame frameRGB;
IplImage *showImage =cvCreateImage(cvSize(352,288),8,3);
avpicture_alloc((AVPicture*)&frameRGB,PIX_FMT_RGB24,352,288);
cvNamedWindow("decode");
frame = 0;
for(;;) {
build_avpkt(&avpkt, f);
if(avpkt.size == 0)
break;
while(avpkt.size > 0) {
len = avcodec_decode_video2(c,picture, &got_picture, &avpkt);//解碼每一幀
if(len < 0) {
fprintf(stderr, "Error while decoding frame %d\n",frame);
break;
}
if(got_picture) {
printf("savingframe %3d\n", frame);
fflush(stdout);
/* thepicture is allocated by the decoder. no need to free it */
//将YUV420格式的圖像轉換成RGB格式所需要的轉換上下文
SwsContext* scxt =sws_getContext(picture->width,picture->height,PIX_FMT_YUV420P,
picture->width,picture->height,PIX_FMT_RGB24,
2,NULL,NULL,NULL);
if(scxt != NULL)
{
sws_scale(scxt,picture->data,picture->linesize,0,c->height,frameRGB.data,frameRGB.linesize);//圖像格式轉換
showImage->imageSize =frameRGB.linesize[0];//指針指派給要顯示的圖像
showImage->imageData = (char *)frameRGB.data[0];
cvShowImage("decode",showImage);//顯示
cvWaitKey(0.5);//設定0.5s顯示一幀,如果不設定由于這是個循環,會導緻看不到顯示出來的圖像
}
//sprintf(buf,outfilename,frame);
//pgm_save(picture->data[0],picture->linesize[0],
//c->width,c->height, buf);
//pgm_save(picture->data[1],picture->linesize[1],
//c->width/2,c->height/2, fout);
//pgm_save(picture->data[2],picture->linesize[2],
//c->width/2,c->height/2, fout);
frame++;
}
avpkt.size -= len;
avpkt.data += len;
}
}
/* some codecs,such as MPEG, transmit the I and P frame with a
latency of one frame. You must do thefollowing to have a
chance to get the last frame of the video */
avpkt.data = NULL;
avpkt.size = 0;
len = avcodec_decode_video2(c, picture,&got_picture, &avpkt);
if(got_picture) {
printf("savinglast frame %3d\n", frame);
fflush(stdout);
/* the pictureis allocated by the decoder. no need to
free it */
sprintf(buf, outfilename, frame);
//pgm_save(picture->data[0],picture->linesize[0],
// c->width, c->height, fout);
pgm_save(picture->data[0],picture->linesize[0],c->width, c->height, fout);
pgm_save(picture->data[1],picture->linesize[1],c->width/2, c->height/2, fout);
pgm_save(picture->data[2],picture->linesize[2],c->width/2, c->height/2, fout);
frame++;
}
fclose(f);
// fclose(fout);
avcodec_close(c);
av_free(c);
av_free(picture);
printf("\n");
}
int main(int argc, char* argv[])
{
avcodec_register_all();//注冊所有的編解碼器,一定要注意,如果沒有這行代碼則會出錯,提示沒有找不到編解碼器
video_decode_example("%3d.pgm","test.264");//可以使用x264編碼出來的264檔案
system("pause");
return 0;
}