Code Walkthrough: FFMPEG-ffplayer02
AVFrame
int attribute_align_arg avcodec_receive_frame(AVCodecContext *avctx, AVFrame *frame)
We pick one audio decoder (G722) and one video decoder (the SStar HEVC decoder) and walk through how each of them fills the AVFrame that avcodec_receive_frame hands back to the caller.
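For orientation, here is a minimal sketch of the public send/receive loop that eventually lands in the per-codec callbacks discussed below. The helper name and error handling are illustrative assumptions, not code from the walkthrough:

#include <libavcodec/avcodec.h>

// Sketch of the standard decode loop: avcodec_send_packet() feeds compressed
// data, avcodec_receive_frame() returns decoded AVFrames until EAGAIN/EOF.
static int decode_packet(AVCodecContext *dec_ctx, AVPacket *pkt, AVFrame *frame)
{
    int ret = avcodec_send_packet(dec_ctx, pkt);
    if (ret < 0)
        return ret;

    while (ret >= 0) {
        ret = avcodec_receive_frame(dec_ctx, frame);   // decoder fills frame->data[]
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            return 0;                                  // need more input / stream drained
        if (ret < 0)
            return ret;                                // real decoding error

        /* ... consume frame->data / frame->nb_samples here ... */
        av_frame_unref(frame);                         // drop references before reusing the frame
    }
    return 0;
}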
Audio: G722
g722dec.c -> g722_decode_frame
The decoder allocates the data buffers of the caller-supplied frame via ff_get_buffer.
g722_decode_frame is the per-codec counterpart of avcodec_receive_frame: the generic decode path invokes it to produce each output frame.
static int g722_decode_frame(AVCodecContext *avctx, void *data,
                             int *got_frame_ptr, AVPacket *avpkt)
{
    G722Context *c = avctx->priv_data;
    AVFrame *frame = data;
    int16_t *out_buf;
    int j, ret;
    const int skip = 8 - c->bits_per_codeword;
    const int16_t *quantizer_table = low_inv_quants[skip];
    GetBitContext gb;

    /* get output buffer */
    frame->nb_samples = avpkt->size * 2;
    // allocate the data buffers that the frame's data pointers will refer to
    if ((ret = ff_get_buffer(avctx, frame, 0)) < 0)
        return ret;
    out_buf = (int16_t *)frame->data[0];

    init_get_bits(&gb, avpkt->data, avpkt->size * 8);
    .........
}
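For completeness, a hedged sketch of how this decoder would be opened through the public API; the sample rate value and helper name are illustrative, and the field names follow the older FFmpeg API used throughout this walkthrough:

#include <libavcodec/avcodec.h>

// Open the G.722 decoder; once opened, packets fed via avcodec_send_packet()
// end up in g722_decode_frame() through the generic decode path.
static AVCodecContext *open_g722_decoder(void)
{
    const AVCodec *dec = avcodec_find_decoder(AV_CODEC_ID_ADPCM_G722);
    AVCodecContext *ctx = dec ? avcodec_alloc_context3(dec) : NULL;
    if (!ctx)
        return NULL;

    ctx->sample_rate = 16000;   // assumption: G.722 streams are 16 kHz
    ctx->channels    = 1;       // the decoder only accepts mono input

    if (avcodec_open2(ctx, dec, NULL) < 0) {
        avcodec_free_context(&ctx);
        return NULL;
    }
    return ctx;
}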
The ff_get_buffer function:
int ff_get_buffer(AVCodecContext *avctx, AVFrame *frame, int flags)
{
    int ret = get_buffer_internal(avctx, frame, flags);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
        frame->width = frame->height = 0;
    }
    return ret;
}
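ff_get_buffer is libavcodec-internal; the user-visible hook on this allocation path is the AVCodecContext.get_buffer2 callback, which defaults to avcodec_default_get_buffer2. A hedged sketch of overriding it (the callback name and log line are illustrative):

#include <libavcodec/avcodec.h>
#include <libavutil/log.h>

// Custom get_buffer2: log the request, then fall back to the default
// allocator. get_buffer_internal() ends up invoking this callback.
static int my_get_buffer2(AVCodecContext *ctx, AVFrame *frame, int flags)
{
    av_log(ctx, AV_LOG_DEBUG, "get_buffer2: %dx%d / %d samples\n",
           frame->width, frame->height, frame->nb_samples);
    return avcodec_default_get_buffer2(ctx, frame, flags);
}

/* usage (before avcodec_open2):
 *     dec_ctx->get_buffer2 = my_get_buffer2;
 */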
get_buffer_internal
===> 1
static int get_buffer_internal(AVCodecContext *avctx, AVFrame *frame, int flags)
{
    .....
    ret = ff_attach_decode_data(frame);
    .....
}

===> 2
int ff_attach_decode_data(AVFrame *frame)
{
    ....
    fdd_buf = av_buffer_create((uint8_t*)fdd, sizeof(*fdd), decode_data_free,
                               NULL, AV_BUFFER_FLAG_READONLY);
    .....
    return 0;
}

===> 3
AVBufferRef *av_buffer_create(uint8_t *data, int size,
                              void (*free)(void *opaque, uint8_t *data),
                              void *opaque, int flags)
{
    AVBufferRef *ref = NULL;
    AVBuffer    *buf = NULL;

    buf = av_mallocz(sizeof(*buf));
    if (!buf)
        return NULL;

    buf->data   = data;
    buf->size   = size;
    buf->free   = free ? free : av_buffer_default_free;
    buf->opaque = opaque;

    atomic_init(&buf->refcount, 1);

    if (flags & AV_BUFFER_FLAG_READONLY)
        buf->flags |= BUFFER_FLAG_READONLY;

    ref = av_mallocz(sizeof(*ref));
    if (!ref) {
        av_freep(&buf);
        return NULL;
    }

    ref->buffer = buf;
    ref->data   = data;
    ref->size   = size;

    return ref;
}
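To see what the reference counting set up here buys, a small hedged example that wraps a caller-owned buffer with av_buffer_create and a custom free callback; the names and sizes are illustrative:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <libavutil/buffer.h>

// Custom free callback: runs once the last reference is dropped.
static void my_free(void *opaque, uint8_t *data)
{
    printf("last ref gone, freeing %s\n", (const char *)opaque);
    free(data);
}

int main(void)
{
    uint8_t *mem = malloc(4096);
    AVBufferRef *a = av_buffer_create(mem, 4096, my_free, "demo-buffer", 0);
    if (!mem || !a)
        return 1;

    AVBufferRef *b = av_buffer_ref(a);   // refcount: 2, both refs share one AVBuffer

    av_buffer_unref(&a);                 // refcount: 1, data still alive through b
    av_buffer_unref(&b);                 // refcount: 0, my_free() runs here
    return 0;
}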
Video: hevcdecSstar, H.265 decoding on SigmaStar (SStar) hardware
ss_hevc_receive_frame is this decoder's counterpart of avcodec_receive_frame.
===> 1
static int ss_hevc_receive_frame(AVCodecContext *avctx, AVFrame *frame)
{
    got_frame = 0;
    if (MI_SUCCESS == (ret2 = ss_hevc_get_frame(s, frame))) {
        got_frame = 1;
        frame->best_effort_timestamp = frame->pts;
    }
}

===> 2
static int ss_hevc_get_frame(SsHevcContext *ssctx, AVFrame *frame)
{
    ret = av_frame_get_buffer(frame, 32);

    if (MI_SUCCESS == (ret = MI_SYS_ChnOutputPortGetBuf(&stVdecChnPort,
                                                        &frame_buf->stVdecBufInfo,
                                                        &frame_buf->stVdecHandle))) {
        if (frame_buf->stVdecBufInfo.eBufType == E_MI_SYS_BUFDATA_FRAME) {
            frame_buf->bType = FALSE;
            frame->width  = frame_buf->stVdecBufInfo.stFrameData.u16Width;
            frame->height = frame_buf->stVdecBufInfo.stFrameData.u16Height;
        } else if (frame_buf->stVdecBufInfo.eBufType == E_MI_SYS_BUFDATA_META) {
            pstVdecInfo = (mi_vdec_DispFrame_t *)frame_buf->stVdecBufInfo.stMetaData.pVirAddr;
            frame_buf->bType = TRUE;
            frame_buf->s32Index = pstVdecInfo->s32Idx;
            frame->width  = pstVdecInfo->stFrmInfo.u16Width;
            frame->height = pstVdecInfo->stFrmInfo.u16Height;
        }
        frame->opaque = (SS_Vdec_BufInfo *)frame_buf;
        frame->pts    = frame_buf->stVdecBufInfo.u64Pts;
        frame->format = ssctx->format;
    }
}
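The vendor code above leans on the public av_frame_get_buffer helper. A minimal sketch of that allocation path in isolation; the pixel format and helper name are assumptions for illustration:

#include <libavutil/frame.h>
#include <libavutil/pixfmt.h>

// av_frame_get_buffer() requires format, width and height (for video) to be
// set on the frame before it can allocate reference-counted buffers.
static AVFrame *alloc_video_frame(int width, int height)
{
    AVFrame *frame = av_frame_alloc();
    if (!frame)
        return NULL;

    frame->format = AV_PIX_FMT_NV12;   // assumption: NV12, a common hardware-decoder output
    frame->width  = width;
    frame->height = height;

    if (av_frame_get_buffer(frame, 32) < 0) {   // 32-byte alignment, as in ss_hevc_get_frame
        av_frame_free(&frame);
        return NULL;
    }
    return frame;
}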
Frame release function
av_frame_unref
void av_frame_unref(AVFrame *frame)
{
    int i;

    if (!frame)
        return;

    wipe_side_data(frame);

    for (i = 0; i < FF_ARRAY_ELEMS(frame->buf); i++)
        av_buffer_unref(&frame->buf[i]);
    for (i = 0; i < frame->nb_extended_buf; i++)
        av_buffer_unref(&frame->extended_buf[i]);
    av_freep(&frame->extended_buf);
    av_dict_free(&frame->metadata);
#if FF_API_FRAME_QP
FF_DISABLE_DEPRECATION_WARNINGS
    av_buffer_unref(&frame->qp_table_buf);
FF_ENABLE_DEPRECATION_WARNINGS
#endif

    av_buffer_unref(&frame->hw_frames_ctx);

    av_buffer_unref(&frame->opaque_ref);
    av_buffer_unref(&frame->private_ref);

    get_frame_defaults(frame);
}
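Because the buffers are reference counted, av_frame_unref only drops this frame's references; the data survives as long as another reference exists. A hedged illustration, assuming src already holds reference-counted buffers (e.g. filled by avcodec_receive_frame or av_frame_get_buffer); the function name is illustrative:

#include <libavutil/frame.h>

// dst takes an extra reference to src's buffers, so unreferencing src does
// not free the data; it just wipes src back to its default state.
static void demo_frame_refcount(AVFrame *src)
{
    AVFrame *dst = av_frame_alloc();
    if (!dst)
        return;

    if (av_frame_ref(dst, src) == 0) {   // dst->buf[] now shares src's AVBufferRefs
        av_frame_unref(src);             // src reset; data still alive through dst
        /* ... use dst->data here ... */
    }
    av_frame_free(&dst);                 // last reference dropped, buffers actually freed
}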