author     Diego Biurrun <diego@biurrun.de>    2011-07-06 20:08:30 +0200
committer  Diego Biurrun <diego@biurrun.de>    2011-07-11 00:19:00 +0200
commit     657ccb5ac75ce34e62bd67f228d9bd36db72189e (patch)
tree       e490b0d6b4ff93490f70d4f67a0551dd187147e3 /libavcodec/mpeg4videodec.c
parent     142e76f1055de5dde44696e71a5f63f2cb11dedf (diff)
Eliminate FF_COMMON_FRAME macro.
FF_COMMON_FRAME holds the contents of the AVFrame structure and is also copied into struct Picture. Replace it with an embedded AVFrame structure in struct Picture.
Diffstat (limited to 'libavcodec/mpeg4videodec.c')
-rw-r--r--   libavcodec/mpeg4videodec.c | 83
1 file changed, 42 insertions(+), 41 deletions(-)
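For context on the access pattern the hunks below rewrite: FF_COMMON_FRAME used to paste the shared AVFrame members (qscale_table, mb_type, motion_val, mbskip_table, pts, ...) directly into struct Picture, so the decoder wrote s->current_picture.qscale_table. With the macro gone, Picture embeds an AVFrame named f and the same members are reached through it. The following standalone sketch illustrates only that access change; the struct layouts are reduced stand-ins, not the real mpegvideo.h definitions.

#include <stdint.h>
#include <stdio.h>

/* Illustrative stand-ins for the real structures (field set reduced to
 * what this file touches; not the actual libavcodec definitions).      */
typedef struct AVFrameSketch {
    int8_t   *qscale_table;   /* per-macroblock quantizer values */
    uint32_t *mb_type;        /* per-macroblock type flags       */
    int64_t   pts;            /* presentation timestamp          */
} AVFrameSketch;

/* Before the patch, FF_COMMON_FRAME expanded these members directly
 * inside Picture; afterwards they live in an embedded frame 'f'.    */
typedef struct PictureSketch {
    AVFrameSketch f;          /* embedded frame (the new layout)            */
    /* ... codec-private bookkeeping follows in the real struct ...         */
} PictureSketch;

int main(void)
{
    int8_t   qscale_table[4] = { 0 };
    uint32_t mb_type[4]      = { 0 };
    PictureSketch current_picture = {
        .f = { .qscale_table = qscale_table, .mb_type = mb_type, .pts = 0 },
    };
    int xy = 1, qscale = 5;

    /* old access pattern:  current_picture.qscale_table[xy] = qscale;
     * new access pattern, as used throughout the hunks below:          */
    current_picture.f.qscale_table[xy] = qscale;
    current_picture.f.mb_type[xy]      = 1u;   /* e.g. an MB_TYPE_* flag */

    printf("qscale[%d] = %d\n", xy, current_picture.f.qscale_table[xy]);
    return 0;
}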
diff --git a/libavcodec/mpeg4videodec.c b/libavcodec/mpeg4videodec.c
index 22d7ace789..df0191dc9f 100644
--- a/libavcodec/mpeg4videodec.c
+++ b/libavcodec/mpeg4videodec.c
@@ -55,7 +55,7 @@ void mpeg4_pred_ac(MpegEncContext * s, DCTELEM *block, int n,
{
int i;
int16_t *ac_val, *ac_val1;
- int8_t * const qscale_table= s->current_picture.qscale_table;
+ int8_t * const qscale_table = s->current_picture.f.qscale_table;
/* find prediction */
ac_val = s->ac_val[0][0] + s->block_index[n] * 16;
@@ -376,7 +376,7 @@ int mpeg4_decode_video_packet_header(MpegEncContext *s)
if(s->pict_type == AV_PICTURE_TYPE_B){
int mb_x = 0, mb_y = 0;
- while(s->next_picture.mbskip_table[ s->mb_index2xy[ mb_num ] ]) {
+ while (s->next_picture.f.mbskip_table[s->mb_index2xy[mb_num]]) {
if (!mb_x) ff_thread_await_progress((AVFrame*)s->next_picture_ptr, mb_y++, 0);
mb_num++;
if (++mb_x == s->mb_width) mb_x = 0;
@@ -570,13 +570,13 @@ static int mpeg4_decode_partition_a(MpegEncContext *s){
}while(cbpc == 8);
s->cbp_table[xy]= cbpc & 3;
- s->current_picture.mb_type[xy]= MB_TYPE_INTRA;
+ s->current_picture.f.mb_type[xy] = MB_TYPE_INTRA;
s->mb_intra = 1;
if(cbpc & 4) {
ff_set_qscale(s, s->qscale + quant_tab[get_bits(&s->gb, 2)]);
}
- s->current_picture.qscale_table[xy]= s->qscale;
+ s->current_picture.f.qscale_table[xy]= s->qscale;
s->mbintra_table[xy]= 1;
for(i=0; i<6; i++){
@@ -592,7 +592,7 @@ static int mpeg4_decode_partition_a(MpegEncContext *s){
s->pred_dir_table[xy]= dir;
}else{ /* P/S_TYPE */
int mx, my, pred_x, pred_y, bits;
- int16_t * const mot_val= s->current_picture.motion_val[0][s->block_index[0]];
+ int16_t * const mot_val = s->current_picture.f.motion_val[0][s->block_index[0]];
const int stride= s->b8_stride*2;
try_again:
@@ -604,11 +604,11 @@ try_again:
if(bits&0x10000){
/* skip mb */
if(s->pict_type==AV_PICTURE_TYPE_S && s->vol_sprite_usage==GMC_SPRITE){
- s->current_picture.mb_type[xy]= MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_GMC | MB_TYPE_L0;
+ s->current_picture.f.mb_type[xy] = MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_GMC | MB_TYPE_L0;
mx= get_amv(s, 0);
my= get_amv(s, 1);
}else{
- s->current_picture.mb_type[xy]= MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_L0;
+ s->current_picture.f.mb_type[xy] = MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_L0;
mx=my=0;
}
mot_val[0 ]= mot_val[2 ]=
@@ -634,7 +634,7 @@ try_again:
s->mb_intra = ((cbpc & 4) != 0);
if(s->mb_intra){
- s->current_picture.mb_type[xy]= MB_TYPE_INTRA;
+ s->current_picture.f.mb_type[xy] = MB_TYPE_INTRA;
s->mbintra_table[xy]= 1;
mot_val[0 ]= mot_val[2 ]=
mot_val[0+stride]= mot_val[2+stride]= 0;
@@ -660,11 +660,11 @@ try_again:
my = h263_decode_motion(s, pred_y, s->f_code);
if (my >= 0xffff)
return -1;
- s->current_picture.mb_type[xy]= MB_TYPE_16x16 | MB_TYPE_L0;
+ s->current_picture.f.mb_type[xy] = MB_TYPE_16x16 | MB_TYPE_L0;
} else {
mx = get_amv(s, 0);
my = get_amv(s, 1);
- s->current_picture.mb_type[xy]= MB_TYPE_16x16 | MB_TYPE_GMC | MB_TYPE_L0;
+ s->current_picture.f.mb_type[xy] = MB_TYPE_16x16 | MB_TYPE_GMC | MB_TYPE_L0;
}
mot_val[0 ]= mot_val[2 ] =
@@ -673,7 +673,7 @@ try_again:
mot_val[1+stride]= mot_val[3+stride]= my;
} else {
int i;
- s->current_picture.mb_type[xy]= MB_TYPE_8x8 | MB_TYPE_L0;
+ s->current_picture.f.mb_type[xy] = MB_TYPE_8x8 | MB_TYPE_L0;
for(i=0;i<4;i++) {
int16_t *mot_val= h263_pred_motion(s, i, 0, &pred_x, &pred_y);
mx = h263_decode_motion(s, pred_x, s->f_code);
@@ -725,9 +725,9 @@ static int mpeg4_decode_partition_b(MpegEncContext *s, int mb_count){
}
s->cbp_table[xy]|= cbpy<<2;
- s->current_picture.mb_type[xy] |= ac_pred*MB_TYPE_ACPRED;
+ s->current_picture.f.mb_type[xy] |= ac_pred*MB_TYPE_ACPRED;
}else{ /* P || S_TYPE */
- if(IS_INTRA(s->current_picture.mb_type[xy])){
+ if (IS_INTRA(s->current_picture.f.mb_type[xy])) {
int dir=0,i;
int ac_pred = get_bits1(&s->gb);
int cbpy = get_vlc2(&s->gb, ff_h263_cbpy_vlc.table, CBPY_VLC_BITS, 1);
@@ -740,7 +740,7 @@ static int mpeg4_decode_partition_b(MpegEncContext *s, int mb_count){
if(s->cbp_table[xy] & 8) {
ff_set_qscale(s, s->qscale + quant_tab[get_bits(&s->gb, 2)]);
}
- s->current_picture.qscale_table[xy]= s->qscale;
+ s->current_picture.f.qscale_table[xy] = s->qscale;
for(i=0; i<6; i++){
int dc_pred_dir;
@@ -754,10 +754,10 @@ static int mpeg4_decode_partition_b(MpegEncContext *s, int mb_count){
}
s->cbp_table[xy]&= 3; //remove dquant
s->cbp_table[xy]|= cbpy<<2;
- s->current_picture.mb_type[xy] |= ac_pred*MB_TYPE_ACPRED;
+ s->current_picture.f.mb_type[xy] |= ac_pred*MB_TYPE_ACPRED;
s->pred_dir_table[xy]= dir;
- }else if(IS_SKIP(s->current_picture.mb_type[xy])){
- s->current_picture.qscale_table[xy]= s->qscale;
+ } else if (IS_SKIP(s->current_picture.f.mb_type[xy])) {
+ s->current_picture.f.qscale_table[xy] = s->qscale;
s->cbp_table[xy]= 0;
}else{
int cbpy = get_vlc2(&s->gb, ff_h263_cbpy_vlc.table, CBPY_VLC_BITS, 1);
@@ -770,7 +770,7 @@ static int mpeg4_decode_partition_b(MpegEncContext *s, int mb_count){
if(s->cbp_table[xy] & 8) {
ff_set_qscale(s, s->qscale + quant_tab[get_bits(&s->gb, 2)]);
}
- s->current_picture.qscale_table[xy]= s->qscale;
+ s->current_picture.f.qscale_table[xy] = s->qscale;
s->cbp_table[xy]&= 3; //remove dquant
s->cbp_table[xy]|= (cbpy^0xf)<<2;
@@ -1091,20 +1091,20 @@ static int mpeg4_decode_partitioned_mb(MpegEncContext *s, DCTELEM block[6][64])
int cbp, mb_type;
const int xy= s->mb_x + s->mb_y*s->mb_stride;
- mb_type= s->current_picture.mb_type[xy];
+ mb_type = s->current_picture.f.mb_type[xy];
cbp = s->cbp_table[xy];
s->use_intra_dc_vlc= s->qscale < s->intra_dc_threshold;
- if(s->current_picture.qscale_table[xy] != s->qscale){
- ff_set_qscale(s, s->current_picture.qscale_table[xy] );
+ if (s->current_picture.f.qscale_table[xy] != s->qscale) {
+ ff_set_qscale(s, s->current_picture.f.qscale_table[xy]);
}
if (s->pict_type == AV_PICTURE_TYPE_P || s->pict_type==AV_PICTURE_TYPE_S) {
int i;
for(i=0; i<4; i++){
- s->mv[0][i][0] = s->current_picture.motion_val[0][ s->block_index[i] ][0];
- s->mv[0][i][1] = s->current_picture.motion_val[0][ s->block_index[i] ][1];
+ s->mv[0][i][0] = s->current_picture.f.motion_val[0][s->block_index[i]][0];
+ s->mv[0][i][1] = s->current_picture.f.motion_val[0][s->block_index[i]][1];
}
s->mb_intra = IS_INTRA(mb_type);
@@ -1122,7 +1122,7 @@ static int mpeg4_decode_partitioned_mb(MpegEncContext *s, DCTELEM block[6][64])
s->mb_skipped = 1;
}
}else if(s->mb_intra){
- s->ac_pred = IS_ACPRED(s->current_picture.mb_type[xy]);
+ s->ac_pred = IS_ACPRED(s->current_picture.f.mb_type[xy]);
}else if(!s->mb_intra){
// s->mcsel= 0; //FIXME do we need to init that
@@ -1135,7 +1135,7 @@ static int mpeg4_decode_partitioned_mb(MpegEncContext *s, DCTELEM block[6][64])
}
} else { /* I-Frame */
s->mb_intra = 1;
- s->ac_pred = IS_ACPRED(s->current_picture.mb_type[xy]);
+ s->ac_pred = IS_ACPRED(s->current_picture.f.mb_type[xy]);
}
if (!IS_SKIP(mb_type)) {
@@ -1188,14 +1188,14 @@ static int mpeg4_decode_mb(MpegEncContext *s,
s->mv_dir = MV_DIR_FORWARD;
s->mv_type = MV_TYPE_16X16;
if(s->pict_type==AV_PICTURE_TYPE_S && s->vol_sprite_usage==GMC_SPRITE){
- s->current_picture.mb_type[xy]= MB_TYPE_SKIP | MB_TYPE_GMC | MB_TYPE_16x16 | MB_TYPE_L0;
+ s->current_picture.f.mb_type[xy] = MB_TYPE_SKIP | MB_TYPE_GMC | MB_TYPE_16x16 | MB_TYPE_L0;
s->mcsel=1;
s->mv[0][0][0]= get_amv(s, 0);
s->mv[0][0][1]= get_amv(s, 1);
s->mb_skipped = 0;
}else{
- s->current_picture.mb_type[xy]= MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_L0;
+ s->current_picture.f.mb_type[xy] = MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_L0;
s->mcsel=0;
s->mv[0][0][0] = 0;
s->mv[0][0][1] = 0;
@@ -1230,7 +1230,7 @@ static int mpeg4_decode_mb(MpegEncContext *s,
s->mv_dir = MV_DIR_FORWARD;
if ((cbpc & 16) == 0) {
if(s->mcsel){
- s->current_picture.mb_type[xy]= MB_TYPE_GMC | MB_TYPE_16x16 | MB_TYPE_L0;
+ s->current_picture.f.mb_type[xy] = MB_TYPE_GMC | MB_TYPE_16x16 | MB_TYPE_L0;
/* 16x16 global motion prediction */
s->mv_type = MV_TYPE_16X16;
mx= get_amv(s, 0);
@@ -1238,7 +1238,7 @@ static int mpeg4_decode_mb(MpegEncContext *s,
s->mv[0][0][0] = mx;
s->mv[0][0][1] = my;
}else if((!s->progressive_sequence) && get_bits1(&s->gb)){
- s->current_picture.mb_type[xy]= MB_TYPE_16x8 | MB_TYPE_L0 | MB_TYPE_INTERLACED;
+ s->current_picture.f.mb_type[xy] = MB_TYPE_16x8 | MB_TYPE_L0 | MB_TYPE_INTERLACED;
/* 16x8 field motion prediction */
s->mv_type= MV_TYPE_FIELD;
@@ -1260,7 +1260,7 @@ static int mpeg4_decode_mb(MpegEncContext *s,
s->mv[0][i][1] = my;
}
}else{
- s->current_picture.mb_type[xy]= MB_TYPE_16x16 | MB_TYPE_L0;
+ s->current_picture.f.mb_type[xy] = MB_TYPE_16x16 | MB_TYPE_L0;
/* 16x16 motion prediction */
s->mv_type = MV_TYPE_16X16;
h263_pred_motion(s, 0, 0, &pred_x, &pred_y);
@@ -1277,7 +1277,7 @@ static int mpeg4_decode_mb(MpegEncContext *s,
s->mv[0][0][1] = my;
}
} else {
- s->current_picture.mb_type[xy]= MB_TYPE_8x8 | MB_TYPE_L0;
+ s->current_picture.f.mb_type[xy] = MB_TYPE_8x8 | MB_TYPE_L0;
s->mv_type = MV_TYPE_8X8;
for(i=0;i<4;i++) {
mot_val = h263_pred_motion(s, i, 0, &pred_x, &pred_y);
@@ -1314,7 +1314,7 @@ static int mpeg4_decode_mb(MpegEncContext *s,
}
/* if we skipped it in the future P Frame than skip it now too */
- s->mb_skipped= s->next_picture.mbskip_table[s->mb_y * s->mb_stride + s->mb_x]; // Note, skiptab=0 if last was GMC
+ s->mb_skipped = s->next_picture.f.mbskip_table[s->mb_y * s->mb_stride + s->mb_x]; // Note, skiptab=0 if last was GMC
if(s->mb_skipped){
/* skip mb */
@@ -1327,7 +1327,7 @@ static int mpeg4_decode_mb(MpegEncContext *s,
s->mv[0][0][1] = 0;
s->mv[1][0][0] = 0;
s->mv[1][0][1] = 0;
- s->current_picture.mb_type[xy]= MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_L0;
+ s->current_picture.f.mb_type[xy] = MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_L0;
goto end;
}
@@ -1433,7 +1433,7 @@ static int mpeg4_decode_mb(MpegEncContext *s,
s->mv_dir = MV_DIR_FORWARD | MV_DIR_BACKWARD | MV_DIRECT;
mb_type |= ff_mpeg4_set_direct_mv(s, mx, my);
}
- s->current_picture.mb_type[xy]= mb_type;
+ s->current_picture.f.mb_type[xy] = mb_type;
} else { /* I-Frame */
do{
cbpc = get_vlc2(&s->gb, ff_h263_intra_MCBPC_vlc.table, INTRA_MCBPC_VLC_BITS, 2);
@@ -1448,9 +1448,9 @@ static int mpeg4_decode_mb(MpegEncContext *s,
intra:
s->ac_pred = get_bits1(&s->gb);
if(s->ac_pred)
- s->current_picture.mb_type[xy]= MB_TYPE_INTRA | MB_TYPE_ACPRED;
+ s->current_picture.f.mb_type[xy] = MB_TYPE_INTRA | MB_TYPE_ACPRED;
else
- s->current_picture.mb_type[xy]= MB_TYPE_INTRA;
+ s->current_picture.f.mb_type[xy] = MB_TYPE_INTRA;
cbpy = get_vlc2(&s->gb, ff_h263_cbpy_vlc.table, CBPY_VLC_BITS, 1);
if(cbpy<0){
@@ -1491,12 +1491,12 @@ end:
if(mpeg4_is_resync(s)){
const int delta= s->mb_x + 1 == s->mb_width ? 2 : 1;
- if(s->pict_type==AV_PICTURE_TYPE_B && s->next_picture.mbskip_table[xy + delta]){
+ if (s->pict_type == AV_PICTURE_TYPE_B && s->next_picture.f.mbskip_table[xy + delta]) {
ff_thread_await_progress((AVFrame*)s->next_picture_ptr,
(s->mb_x + delta >= s->mb_width) ? FFMIN(s->mb_y+1, s->mb_height-1) : s->mb_y, 0);
}
- if(s->pict_type==AV_PICTURE_TYPE_B && s->next_picture.mbskip_table[xy + delta])
+ if (s->pict_type == AV_PICTURE_TYPE_B && s->next_picture.f.mbskip_table[xy + delta])
return SLICE_OK;
return SLICE_END;
}
@@ -1961,11 +1961,12 @@ static int decode_vop_header(MpegEncContext *s, GetBitContext *gb){
}
if(s->avctx->time_base.num)
- s->current_picture_ptr->pts= (s->time + s->avctx->time_base.num/2) / s->avctx->time_base.num;
+ s->current_picture_ptr->f.pts = (s->time + s->avctx->time_base.num / 2) / s->avctx->time_base.num;
else
- s->current_picture_ptr->pts= AV_NOPTS_VALUE;
+ s->current_picture_ptr->f.pts = AV_NOPTS_VALUE;
if(s->avctx->debug&FF_DEBUG_PTS)
- av_log(s->avctx, AV_LOG_DEBUG, "MPEG4 PTS: %"PRId64"\n", s->current_picture_ptr->pts);
+ av_log(s->avctx, AV_LOG_DEBUG, "MPEG4 PTS: %"PRId64"\n",
+ s->current_picture_ptr->f.pts);
check_marker(gb, "before vop_coded");