- (int)DecodeH264Frames:(unsigned char *)inputBuffer withLength:(int)aLength
{
    int gotPicPtr = 0;
    int result = 0;
    av_init_packet(&pAvPackage);
    pAvPackage.data = (unsigned char *)inputBuffer;
    pAvPackage.size = aLength;
    // Decode the packet
    result = avcodec_decode_video2(pCodecCtx, pVideoFrame, &gotPicPtr, &pAvPackage);
    // If the video dimensions changed, drop this frame
    if ((pictureWidth != 0) && (pictureWidth != pCodecCtx->width)) {
        setRecordResolveState = 0;
        pictureWidth = pCodecCtx->width;
        return -1;
    }
    // Got a decoded picture: copy out the YUV420 planes (Y, U, V -> RGB later)
    if (gotPicPtr)
    {
        unsigned int lumaLength   = (pCodecCtx->height) * (MIN(pVideoFrame->linesize[0], pCodecCtx->width));             // W * H
        unsigned int chromBLength = ((pCodecCtx->height) / 2) * (MIN(pVideoFrame->linesize[1], (pCodecCtx->width) / 2)); // (W/2) * (H/2)
        unsigned int chromRLength = ((pCodecCtx->height) / 2) * (MIN(pVideoFrame->linesize[2], (pCodecCtx->width) / 2)); // (W/2) * (H/2)

        H264YUV_Frame yuvFrame;
        memset(&yuvFrame, 0, sizeof(H264YUV_Frame));
        yuvFrame.luma.length    = lumaLength;
        yuvFrame.chromaB.length = chromBLength;
        yuvFrame.chromaR.length = chromRLength;

        yuvFrame.luma.dataBuffer    = (unsigned char *)malloc(lumaLength);
        yuvFrame.chromaB.dataBuffer = (unsigned char *)malloc(chromBLength);
        yuvFrame.chromaR.dataBuffer = (unsigned char *)malloc(chromRLength);
        // Strip the stride padding from each plane: data[0]=Y, data[1]=U, data[2]=V
        copyDecodedFrame(pVideoFrame->data[0], yuvFrame.luma.dataBuffer, pVideoFrame->linesize[0],
                         pCodecCtx->width, pCodecCtx->height);
        copyDecodedFrame(pVideoFrame->data[1], yuvFrame.chromaB.dataBuffer, pVideoFrame->linesize[1],
                         pCodecCtx->width / 2, pCodecCtx->height / 2);
        copyDecodedFrame(pVideoFrame->data[2], yuvFrame.chromaR.dataBuffer, pVideoFrame->linesize[2],
                         pCodecCtx->width / 2, pCodecCtx->height / 2);
        yuvFrame.width  = pCodecCtx->width;
        yuvFrame.height = pCodecCtx->height;
        // dispatch_sync blocks until the main thread has consumed the frame,
        // so the plane buffers can be freed safely right after
        dispatch_sync(dispatch_get_main_queue(), ^{
            [self updateYUVFrameOnMainThread:(H264YUV_Frame *)&yuvFrame];
        });

        free(yuvFrame.luma.dataBuffer);
        free(yuvFrame.chromaB.dataBuffer);
        free(yuvFrame.chromaR.dataBuffer);
    }
    av_free_packet(&pAvPackage);
    return 0;
}
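
The copyDecodedFrame helper is not shown in this listing. A minimal sketch of what it presumably does, assuming it copies each plane row by row so the per-row alignment padding FFmpeg adds (linesize can be wider than the visible width) is stripped out:

#include <string.h>

// Hypothetical reconstruction of the helper called above: copies `height`
// rows of `width` visible bytes from a plane whose rows are `linesize`
// bytes apart, packing them tightly into dst.
void copyDecodedFrame(unsigned char *src, unsigned char *dst,
                      int linesize, int width, int height)
{
    for (int i = 0; i < height; i++) {
        memcpy(dst, src, width); // copy only the visible pixels of this row
        src += linesize;         // source advances by the padded stride
        dst += width;            // destination is packed, no padding
    }
}

This also matches the MIN(linesize, width) terms above, which size the destination buffers for the packed layout this helper produces.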
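As a side note, avcodec_decode_video2 and av_free_packet are deprecated since FFmpeg 3.1 in favor of the send/receive API. A sketch of the same decode step on the newer API, reusing this class's pCodecCtx and pVideoFrame instance variables (the surrounding plumbing is assumed):

// Decode step without deprecated calls (FFmpeg >= 3.1).
AVPacket *packet = av_packet_alloc();
packet->data = inputBuffer;
packet->size = aLength;

int ret = avcodec_send_packet(pCodecCtx, packet);
while (ret >= 0) {
    ret = avcodec_receive_frame(pCodecCtx, pVideoFrame);
    if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
        break; // no (more) frames ready for this packet
    // ...copy the YUV planes out exactly as in the body above...
}
av_packet_free(&packet);

One packet can now yield zero or more frames, which is why the receive call sits in a loop rather than relying on a gotPicPtr flag.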