path: root/libavcodec/motion_est.c
author     Anton Khirnov <anton@khirnov.net>  2012-11-21 21:34:46 +0100
committer  Anton Khirnov <anton@khirnov.net>  2013-03-08 07:38:30 +0100
commit     759001c534287a96dc96d1e274665feb7059145d (patch)
tree       6ace9560c20aa30db92067c5b45d7bd86e458d10 /libavcodec/motion_est.c
parent     6e7b50b4270116ded8b874d76cb7c5b1a0341827 (diff)
lavc decoders: work with refcounted frames.
Diffstat (limited to 'libavcodec/motion_est.c')
-rw-r--r--  libavcodec/motion_est.c  62
1 file changed, 31 insertions(+), 31 deletions(-)
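
Context note (not part of the commit): this patch is the motion_est.c portion of the switch to refcounted frames. Per-macroblock metadata such as motion_val, mb_type and mbskip_table is no longer reached through the Picture's embedded AVFrame ("pic.f"), but stored on the Picture itself, since the AVFrame's data buffers are now lifetime-managed by references. Below is a minimal, illustrative sketch of how a caller of that era consumes refcounted decoder output; decode_one() and the trimmed setup/error handling are assumptions for the example, not code from this commit, and the refcounted behaviour of the old decode API is opt-in via AVCodecContext.refcounted_frames.

    /*
     * Illustrative sketch only -- not part of this commit.
     * Assumes an opened AVCodecContext *avctx (with avctx->refcounted_frames = 1)
     * and an input AVPacket *pkt.
     */
    #include <libavcodec/avcodec.h>
    #include <libavutil/frame.h>

    static int decode_one(AVCodecContext *avctx, AVPacket *pkt)
    {
        AVFrame *frame = av_frame_alloc();
        int got_frame = 0;
        int ret;

        if (!frame)
            return AVERROR(ENOMEM);

        /* With refcounted frames, the decoder hands out a reference that
         * the caller owns until it is unreferenced. */
        ret = avcodec_decode_video2(avctx, frame, &got_frame, pkt);
        if (ret >= 0 && got_frame) {
            /* ... use frame->data / frame->linesize here ... */
            av_frame_unref(frame); /* release our reference when done */
        }

        av_frame_free(&frame); /* frees the AVFrame shell (and any remaining ref) */
        return ret;
    }
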
diff --git a/libavcodec/motion_est.c b/libavcodec/motion_est.c
index d70021c4d3..99c5564544 100644
--- a/libavcodec/motion_est.c
+++ b/libavcodec/motion_est.c
@@ -494,16 +494,16 @@ static inline void set_p_mv_tables(MpegEncContext * s, int mx, int my, int mv4)
if(mv4){
int mot_xy= s->block_index[0];
- s->current_picture.f.motion_val[0][mot_xy ][0] = mx;
- s->current_picture.f.motion_val[0][mot_xy ][1] = my;
- s->current_picture.f.motion_val[0][mot_xy + 1][0] = mx;
- s->current_picture.f.motion_val[0][mot_xy + 1][1] = my;
+ s->current_picture.motion_val[0][mot_xy ][0] = mx;
+ s->current_picture.motion_val[0][mot_xy ][1] = my;
+ s->current_picture.motion_val[0][mot_xy + 1][0] = mx;
+ s->current_picture.motion_val[0][mot_xy + 1][1] = my;
mot_xy += s->b8_stride;
- s->current_picture.f.motion_val[0][mot_xy ][0] = mx;
- s->current_picture.f.motion_val[0][mot_xy ][1] = my;
- s->current_picture.f.motion_val[0][mot_xy + 1][0] = mx;
- s->current_picture.f.motion_val[0][mot_xy + 1][1] = my;
+ s->current_picture.motion_val[0][mot_xy ][0] = mx;
+ s->current_picture.motion_val[0][mot_xy ][1] = my;
+ s->current_picture.motion_val[0][mot_xy + 1][0] = mx;
+ s->current_picture.motion_val[0][mot_xy + 1][1] = my;
}
}
@@ -576,8 +576,8 @@ static inline int h263_mv4_search(MpegEncContext *s, int mx, int my, int shift)
const int mot_stride = s->b8_stride;
const int mot_xy = s->block_index[block];
- P_LEFT[0] = s->current_picture.f.motion_val[0][mot_xy - 1][0];
- P_LEFT[1] = s->current_picture.f.motion_val[0][mot_xy - 1][1];
+ P_LEFT[0] = s->current_picture.motion_val[0][mot_xy - 1][0];
+ P_LEFT[1] = s->current_picture.motion_val[0][mot_xy - 1][1];
if(P_LEFT[0] > (c->xmax<<shift)) P_LEFT[0] = (c->xmax<<shift);
@@ -586,10 +586,10 @@ static inline int h263_mv4_search(MpegEncContext *s, int mx, int my, int shift)
c->pred_x= pred_x4= P_LEFT[0];
c->pred_y= pred_y4= P_LEFT[1];
} else {
- P_TOP[0] = s->current_picture.f.motion_val[0][mot_xy - mot_stride ][0];
- P_TOP[1] = s->current_picture.f.motion_val[0][mot_xy - mot_stride ][1];
- P_TOPRIGHT[0] = s->current_picture.f.motion_val[0][mot_xy - mot_stride + off[block]][0];
- P_TOPRIGHT[1] = s->current_picture.f.motion_val[0][mot_xy - mot_stride + off[block]][1];
+ P_TOP[0] = s->current_picture.motion_val[0][mot_xy - mot_stride ][0];
+ P_TOP[1] = s->current_picture.motion_val[0][mot_xy - mot_stride ][1];
+ P_TOPRIGHT[0] = s->current_picture.motion_val[0][mot_xy - mot_stride + off[block]][0];
+ P_TOPRIGHT[1] = s->current_picture.motion_val[0][mot_xy - mot_stride + off[block]][1];
if(P_TOP[1] > (c->ymax<<shift)) P_TOP[1] = (c->ymax<<shift);
if(P_TOPRIGHT[0] < (c->xmin<<shift)) P_TOPRIGHT[0]= (c->xmin<<shift);
if(P_TOPRIGHT[0] > (c->xmax<<shift)) P_TOPRIGHT[0]= (c->xmax<<shift);
@@ -641,8 +641,8 @@ static inline int h263_mv4_search(MpegEncContext *s, int mx, int my, int shift)
my4_sum+= my4;
}
- s->current_picture.f.motion_val[0][s->block_index[block]][0] = mx4;
- s->current_picture.f.motion_val[0][s->block_index[block]][1] = my4;
+ s->current_picture.motion_val[0][s->block_index[block]][0] = mx4;
+ s->current_picture.motion_val[0][s->block_index[block]][1] = my4;
if(mx4 != mx || my4 != my) same=0;
}
@@ -894,16 +894,16 @@ void ff_estimate_p_frame_motion(MpegEncContext * s,
const int mot_stride = s->b8_stride;
const int mot_xy = s->block_index[0];
- P_LEFT[0] = s->current_picture.f.motion_val[0][mot_xy - 1][0];
- P_LEFT[1] = s->current_picture.f.motion_val[0][mot_xy - 1][1];
+ P_LEFT[0] = s->current_picture.motion_val[0][mot_xy - 1][0];
+ P_LEFT[1] = s->current_picture.motion_val[0][mot_xy - 1][1];
if(P_LEFT[0] > (c->xmax<<shift)) P_LEFT[0] = (c->xmax<<shift);
if(!s->first_slice_line) {
- P_TOP[0] = s->current_picture.f.motion_val[0][mot_xy - mot_stride ][0];
- P_TOP[1] = s->current_picture.f.motion_val[0][mot_xy - mot_stride ][1];
- P_TOPRIGHT[0] = s->current_picture.f.motion_val[0][mot_xy - mot_stride + 2][0];
- P_TOPRIGHT[1] = s->current_picture.f.motion_val[0][mot_xy - mot_stride + 2][1];
+ P_TOP[0] = s->current_picture.motion_val[0][mot_xy - mot_stride ][0];
+ P_TOP[1] = s->current_picture.motion_val[0][mot_xy - mot_stride ][1];
+ P_TOPRIGHT[0] = s->current_picture.motion_val[0][mot_xy - mot_stride + 2][0];
+ P_TOPRIGHT[1] = s->current_picture.motion_val[0][mot_xy - mot_stride + 2][1];
if(P_TOP[1] > (c->ymax<<shift)) P_TOP[1] = (c->ymax<<shift);
if(P_TOPRIGHT[0] < (c->xmin<<shift)) P_TOPRIGHT[0]= (c->xmin<<shift);
if(P_TOPRIGHT[1] > (c->ymax<<shift)) P_TOPRIGHT[1]= (c->ymax<<shift);
@@ -1035,9 +1035,9 @@ void ff_estimate_p_frame_motion(MpegEncContext * s,
if(intra_score < dmin){
mb_type= CANDIDATE_MB_TYPE_INTRA;
- s->current_picture.f.mb_type[mb_y*s->mb_stride + mb_x] = CANDIDATE_MB_TYPE_INTRA; //FIXME cleanup
+ s->current_picture.mb_type[mb_y*s->mb_stride + mb_x] = CANDIDATE_MB_TYPE_INTRA; //FIXME cleanup
}else
- s->current_picture.f.mb_type[mb_y*s->mb_stride + mb_x] = 0;
+ s->current_picture.mb_type[mb_y*s->mb_stride + mb_x] = 0;
{
int p_score= FFMIN(vard, varc-500+(s->lambda2>>FF_LAMBDA_SHIFT)*100);
@@ -1403,7 +1403,7 @@ static inline int direct_search(MpegEncContext * s, int mb_x, int mb_y)
ymin= xmin=(-32)>>shift;
ymax= xmax= 31>>shift;
- if (IS_8X8(s->next_picture.f.mb_type[mot_xy])) {
+ if (IS_8X8(s->next_picture.mb_type[mot_xy])) {
s->mv_type= MV_TYPE_8X8;
}else{
s->mv_type= MV_TYPE_16X16;
@@ -1413,8 +1413,8 @@ static inline int direct_search(MpegEncContext * s, int mb_x, int mb_y)
int index= s->block_index[i];
int min, max;
- c->co_located_mv[i][0] = s->next_picture.f.motion_val[0][index][0];
- c->co_located_mv[i][1] = s->next_picture.f.motion_val[0][index][1];
+ c->co_located_mv[i][0] = s->next_picture.motion_val[0][index][0];
+ c->co_located_mv[i][1] = s->next_picture.motion_val[0][index][1];
c->direct_basis_mv[i][0]= c->co_located_mv[i][0]*time_pb/time_pp + ((i& 1)<<(shift+3));
c->direct_basis_mv[i][1]= c->co_located_mv[i][1]*time_pb/time_pp + ((i>>1)<<(shift+3));
// c->direct_basis_mv[1][i][0]= c->co_located_mv[i][0]*(time_pb - time_pp)/time_pp + ((i &1)<<(shift+3);
@@ -1503,7 +1503,7 @@ void ff_estimate_b_frame_motion(MpegEncContext * s,
c->skip=0;
- if (s->codec_id == AV_CODEC_ID_MPEG4 && s->next_picture.f.mbskip_table[xy]) {
+ if (s->codec_id == AV_CODEC_ID_MPEG4 && s->next_picture.mbskip_table[xy]) {
int score= direct_search(s, mb_x, mb_y); //FIXME just check 0,0
score= ((unsigned)(score*score + 128*256))>>16;
@@ -1674,14 +1674,14 @@ void ff_fix_long_p_mvs(MpegEncContext * s)
int block;
for(block=0; block<4; block++){
int off= (block& 1) + (block>>1)*wrap;
- int mx = s->current_picture.f.motion_val[0][ xy + off ][0];
- int my = s->current_picture.f.motion_val[0][ xy + off ][1];
+ int mx = s->current_picture.motion_val[0][ xy + off ][0];
+ int my = s->current_picture.motion_val[0][ xy + off ][1];
if( mx >=range || mx <-range
|| my >=range || my <-range){
s->mb_type[i] &= ~CANDIDATE_MB_TYPE_INTER4V;
s->mb_type[i] |= CANDIDATE_MB_TYPE_INTRA;
- s->current_picture.f.mb_type[i] = CANDIDATE_MB_TYPE_INTRA;
+ s->current_picture.mb_type[i] = CANDIDATE_MB_TYPE_INTRA;
}
}
}