author	Anton Khirnov <anton@khirnov.net>	2012-11-21 21:34:46 +0100
committer	Anton Khirnov <anton@khirnov.net>	2013-03-08 07:38:30 +0100
commit	759001c534287a96dc96d1e274665feb7059145d (patch)
tree	6ace9560c20aa30db92067c5b45d7bd86e458d10 /libavcodec/h263.c
parent	6e7b50b4270116ded8b874d76cb7c5b1a0341827 (diff)
lavc decoders: work with refcounted frames.
Diffstat (limited to 'libavcodec/h263.c')
-rw-r--r--	libavcodec/h263.c	| 48
1 file changed, 24 insertions(+), 24 deletions(-)
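Note on the commit message above: with refcounted frames, picture data is shared through reference-counted AVFrames instead of being copied, which is why this patch moves the per-picture tables (mb_type, motion_val, qscale_table, ref_index, mbskip_table) off the embedded AVFrame (the ".f." member) and onto the Picture itself. The snippet below is only a minimal sketch of the generic libavutil AVFrame refcounting API (av_frame_ref/av_frame_unref); it is not taken from this patch, and the decoder-internal plumbing differs.

#include <libavutil/frame.h>
#include <libavutil/pixfmt.h>

int main(void)
{
    AVFrame *src = av_frame_alloc();
    AVFrame *dst = av_frame_alloc();
    int ret = (src && dst) ? 0 : -1;

    if (ret < 0)
        goto end;

    /* describe the frame, then allocate refcounted data buffers for it */
    src->format = AV_PIX_FMT_YUV420P;
    src->width  = 352;
    src->height = 288;
    ret = av_frame_get_buffer(src, 32);
    if (ret < 0)
        goto end;

    /* take another reference to the same buffers: no pixel copy */
    ret = av_frame_ref(dst, src);
    if (ret < 0)
        goto end;

    /* releasing src's reference keeps the data alive through dst */
    av_frame_unref(src);

end:
    av_frame_free(&src);
    av_frame_free(&dst);
    return ret < 0;
}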
diff --git a/libavcodec/h263.c b/libavcodec/h263.c
index 5ff47de76d..c097033951 100644
--- a/libavcodec/h263.c
+++ b/libavcodec/h263.c
@@ -51,7 +51,7 @@ void ff_h263_update_motion_val(MpegEncContext * s){
const int wrap = s->b8_stride;
const int xy = s->block_index[0];
- s->current_picture.f.mbskip_table[mb_xy] = s->mb_skipped;
+ s->current_picture.mbskip_table[mb_xy] = s->mb_skipped;
if(s->mv_type != MV_TYPE_8X8){
int motion_x, motion_y;
@@ -70,30 +70,30 @@ void ff_h263_update_motion_val(MpegEncContext * s){
s->p_field_mv_table[i][0][mb_xy][0]= s->mv[0][i][0];
s->p_field_mv_table[i][0][mb_xy][1]= s->mv[0][i][1];
}
- s->current_picture.f.ref_index[0][4*mb_xy ] =
- s->current_picture.f.ref_index[0][4*mb_xy + 1] = s->field_select[0][0];
- s->current_picture.f.ref_index[0][4*mb_xy + 2] =
- s->current_picture.f.ref_index[0][4*mb_xy + 3] = s->field_select[0][1];
+ s->current_picture.ref_index[0][4*mb_xy ] =
+ s->current_picture.ref_index[0][4*mb_xy + 1] = s->field_select[0][0];
+ s->current_picture.ref_index[0][4*mb_xy + 2] =
+ s->current_picture.ref_index[0][4*mb_xy + 3] = s->field_select[0][1];
}
/* no update if 8X8 because it has been done during parsing */
- s->current_picture.f.motion_val[0][xy][0] = motion_x;
- s->current_picture.f.motion_val[0][xy][1] = motion_y;
- s->current_picture.f.motion_val[0][xy + 1][0] = motion_x;
- s->current_picture.f.motion_val[0][xy + 1][1] = motion_y;
- s->current_picture.f.motion_val[0][xy + wrap][0] = motion_x;
- s->current_picture.f.motion_val[0][xy + wrap][1] = motion_y;
- s->current_picture.f.motion_val[0][xy + 1 + wrap][0] = motion_x;
- s->current_picture.f.motion_val[0][xy + 1 + wrap][1] = motion_y;
+ s->current_picture.motion_val[0][xy][0] = motion_x;
+ s->current_picture.motion_val[0][xy][1] = motion_y;
+ s->current_picture.motion_val[0][xy + 1][0] = motion_x;
+ s->current_picture.motion_val[0][xy + 1][1] = motion_y;
+ s->current_picture.motion_val[0][xy + wrap][0] = motion_x;
+ s->current_picture.motion_val[0][xy + wrap][1] = motion_y;
+ s->current_picture.motion_val[0][xy + 1 + wrap][0] = motion_x;
+ s->current_picture.motion_val[0][xy + 1 + wrap][1] = motion_y;
}
if(s->encoding){ //FIXME encoding MUST be cleaned up
if (s->mv_type == MV_TYPE_8X8)
- s->current_picture.f.mb_type[mb_xy] = MB_TYPE_L0 | MB_TYPE_8x8;
+ s->current_picture.mb_type[mb_xy] = MB_TYPE_L0 | MB_TYPE_8x8;
else if(s->mb_intra)
- s->current_picture.f.mb_type[mb_xy] = MB_TYPE_INTRA;
+ s->current_picture.mb_type[mb_xy] = MB_TYPE_INTRA;
else
- s->current_picture.f.mb_type[mb_xy] = MB_TYPE_L0 | MB_TYPE_16x16;
+ s->current_picture.mb_type[mb_xy] = MB_TYPE_L0 | MB_TYPE_16x16;
}
}
@@ -153,7 +153,7 @@ void ff_h263_loop_filter(MpegEncContext * s){
Diag Top
Left Center
*/
- if (!IS_SKIP(s->current_picture.f.mb_type[xy])) {
+ if (!IS_SKIP(s->current_picture.mb_type[xy])) {
qp_c= s->qscale;
s->dsp.h263_v_loop_filter(dest_y+8*linesize , linesize, qp_c);
s->dsp.h263_v_loop_filter(dest_y+8*linesize+8, linesize, qp_c);
@@ -163,10 +163,10 @@ void ff_h263_loop_filter(MpegEncContext * s){
if(s->mb_y){
int qp_dt, qp_tt, qp_tc;
- if (IS_SKIP(s->current_picture.f.mb_type[xy - s->mb_stride]))
+ if (IS_SKIP(s->current_picture.mb_type[xy - s->mb_stride]))
qp_tt=0;
else
- qp_tt = s->current_picture.f.qscale_table[xy - s->mb_stride];
+ qp_tt = s->current_picture.qscale_table[xy - s->mb_stride];
if(qp_c)
qp_tc= qp_c;
@@ -186,10 +186,10 @@ void ff_h263_loop_filter(MpegEncContext * s){
s->dsp.h263_h_loop_filter(dest_y-8*linesize+8 , linesize, qp_tt);
if(s->mb_x){
- if (qp_tt || IS_SKIP(s->current_picture.f.mb_type[xy - 1 - s->mb_stride]))
+ if (qp_tt || IS_SKIP(s->current_picture.mb_type[xy - 1 - s->mb_stride]))
qp_dt= qp_tt;
else
- qp_dt = s->current_picture.f.qscale_table[xy - 1 - s->mb_stride];
+ qp_dt = s->current_picture.qscale_table[xy - 1 - s->mb_stride];
if(qp_dt){
const int chroma_qp= s->chroma_qscale_table[qp_dt];
@@ -208,10 +208,10 @@ void ff_h263_loop_filter(MpegEncContext * s){
if(s->mb_x){
int qp_lc;
- if (qp_c || IS_SKIP(s->current_picture.f.mb_type[xy - 1]))
+ if (qp_c || IS_SKIP(s->current_picture.mb_type[xy - 1]))
qp_lc= qp_c;
else
- qp_lc = s->current_picture.f.qscale_table[xy - 1];
+ qp_lc = s->current_picture.qscale_table[xy - 1];
if(qp_lc){
s->dsp.h263_h_loop_filter(dest_y, linesize, qp_lc);
@@ -320,7 +320,7 @@ int16_t *ff_h263_pred_motion(MpegEncContext * s, int block, int dir,
static const int off[4]= {2, 1, 1, -1};
wrap = s->b8_stride;
- mot_val = s->current_picture.f.motion_val[dir] + s->block_index[block];
+ mot_val = s->current_picture.motion_val[dir] + s->block_index[block];
A = mot_val[ - 1];
/* special case for first (slice) line */