Mercurial > libavcodec.hg
comparison mpegvideo.c @ 10604:ce2cf9e32b09 libavcodec
Set mb_y in mpeg2 field pictures like h264 does.
This fixes -vismv & -debug 16384 with field pictures.
| author | michael |
|---|---|
| date | Mon, 30 Nov 2009 19:14:00 +0000 |
parents | 9a670cfd1941 |
children | 9ea5f2f1874f |
comparison legend: equal | deleted | inserted | replaced
| parent revision | this revision |
|---|---|
| 10603:874e26fe2284 | 10604:ce2cf9e32b09 |
1439 /* apply one mpeg motion vector to the three components */ | 1439 /* apply one mpeg motion vector to the three components */ |
1440 static av_always_inline void mpeg_motion_lowres(MpegEncContext *s, | 1440 static av_always_inline void mpeg_motion_lowres(MpegEncContext *s, |
1441 uint8_t *dest_y, uint8_t *dest_cb, uint8_t *dest_cr, | 1441 uint8_t *dest_y, uint8_t *dest_cb, uint8_t *dest_cr, |
1442 int field_based, int bottom_field, int field_select, | 1442 int field_based, int bottom_field, int field_select, |
1443 uint8_t **ref_picture, h264_chroma_mc_func *pix_op, | 1443 uint8_t **ref_picture, h264_chroma_mc_func *pix_op, |
1444 int motion_x, int motion_y, int h) | 1444 int motion_x, int motion_y, int h, int mb_y) |
1445 { | 1445 { |
1446 uint8_t *ptr_y, *ptr_cb, *ptr_cr; | 1446 uint8_t *ptr_y, *ptr_cb, *ptr_cr; |
1447 int mx, my, src_x, src_y, uvsrc_x, uvsrc_y, uvlinesize, linesize, sx, sy, uvsx, uvsy; | 1447 int mx, my, src_x, src_y, uvsrc_x, uvsrc_y, uvlinesize, linesize, sx, sy, uvsx, uvsy; |
1448 const int lowres= s->avctx->lowres; | 1448 const int lowres= s->avctx->lowres; |
1449 const int block_s= 8>>lowres; | 1449 const int block_s= 8>>lowres; |
1463 } | 1463 } |
1464 | 1464 |
1465 sx= motion_x & s_mask; | 1465 sx= motion_x & s_mask; |
1466 sy= motion_y & s_mask; | 1466 sy= motion_y & s_mask; |
1467 src_x = s->mb_x*2*block_s + (motion_x >> (lowres+1)); | 1467 src_x = s->mb_x*2*block_s + (motion_x >> (lowres+1)); |
1468 src_y =(s->mb_y*2*block_s>>field_based) + (motion_y >> (lowres+1)); | 1468 src_y =( mb_y*2*block_s>>field_based) + (motion_y >> (lowres+1)); |
1469 | 1469 |
1470 if (s->out_format == FMT_H263) { | 1470 if (s->out_format == FMT_H263) { |
1471 uvsx = ((motion_x>>1) & s_mask) | (sx&1); | 1471 uvsx = ((motion_x>>1) & s_mask) | (sx&1); |
1472 uvsy = ((motion_y>>1) & s_mask) | (sy&1); | 1472 uvsy = ((motion_y>>1) & s_mask) | (sy&1); |
1473 uvsrc_x = src_x>>1; | 1473 uvsrc_x = src_x>>1; |
1476 mx = motion_x / 4; | 1476 mx = motion_x / 4; |
1477 my = motion_y / 4; | 1477 my = motion_y / 4; |
1478 uvsx = (2*mx) & s_mask; | 1478 uvsx = (2*mx) & s_mask; |
1479 uvsy = (2*my) & s_mask; | 1479 uvsy = (2*my) & s_mask; |
1480 uvsrc_x = s->mb_x*block_s + (mx >> lowres); | 1480 uvsrc_x = s->mb_x*block_s + (mx >> lowres); |
1481 uvsrc_y = s->mb_y*block_s + (my >> lowres); | 1481 uvsrc_y = mb_y*block_s + (my >> lowres); |
1482 } else { | 1482 } else { |
1483 mx = motion_x / 2; | 1483 mx = motion_x / 2; |
1484 my = motion_y / 2; | 1484 my = motion_y / 2; |
1485 uvsx = mx & s_mask; | 1485 uvsx = mx & s_mask; |
1486 uvsy = my & s_mask; | 1486 uvsy = my & s_mask; |
1487 uvsrc_x = s->mb_x*block_s + (mx >> (lowres+1)); | 1487 uvsrc_x = s->mb_x*block_s + (mx >> (lowres+1)); |
1488 uvsrc_y =(s->mb_y*block_s>>field_based) + (my >> (lowres+1)); | 1488 uvsrc_y =( mb_y*block_s>>field_based) + (my >> (lowres+1)); |
1489 } | 1489 } |
1490 | 1490 |
1491 ptr_y = ref_picture[0] + src_y * linesize + src_x; | 1491 ptr_y = ref_picture[0] + src_y * linesize + src_x; |
1492 ptr_cb = ref_picture[1] + uvsrc_y * uvlinesize + uvsrc_x; | 1492 ptr_cb = ref_picture[1] + uvsrc_y * uvlinesize + uvsrc_x; |
1493 ptr_cr = ref_picture[2] + uvsrc_y * uvlinesize + uvsrc_x; | 1493 ptr_cr = ref_picture[2] + uvsrc_y * uvlinesize + uvsrc_x; |
1610 switch(s->mv_type) { | 1610 switch(s->mv_type) { |
1611 case MV_TYPE_16X16: | 1611 case MV_TYPE_16X16: |
1612 mpeg_motion_lowres(s, dest_y, dest_cb, dest_cr, | 1612 mpeg_motion_lowres(s, dest_y, dest_cb, dest_cr, |
1613 0, 0, 0, | 1613 0, 0, 0, |
1614 ref_picture, pix_op, | 1614 ref_picture, pix_op, |
1615 s->mv[dir][0][0], s->mv[dir][0][1], 2*block_s); | 1615 s->mv[dir][0][0], s->mv[dir][0][1], 2*block_s, mb_y); |
1616 break; | 1616 break; |
1617 case MV_TYPE_8X8: | 1617 case MV_TYPE_8X8: |
1618 mx = 0; | 1618 mx = 0; |
1619 my = 0; | 1619 my = 0; |
1620 for(i=0;i<4;i++) { | 1620 for(i=0;i<4;i++) { |
1637 if (s->picture_structure == PICT_FRAME) { | 1637 if (s->picture_structure == PICT_FRAME) { |
1638 /* top field */ | 1638 /* top field */ |
1639 mpeg_motion_lowres(s, dest_y, dest_cb, dest_cr, | 1639 mpeg_motion_lowres(s, dest_y, dest_cb, dest_cr, |
1640 1, 0, s->field_select[dir][0], | 1640 1, 0, s->field_select[dir][0], |
1641 ref_picture, pix_op, | 1641 ref_picture, pix_op, |
1642 s->mv[dir][0][0], s->mv[dir][0][1], block_s); | 1642 s->mv[dir][0][0], s->mv[dir][0][1], block_s, mb_y); |
1643 /* bottom field */ | 1643 /* bottom field */ |
1644 mpeg_motion_lowres(s, dest_y, dest_cb, dest_cr, | 1644 mpeg_motion_lowres(s, dest_y, dest_cb, dest_cr, |
1645 1, 1, s->field_select[dir][1], | 1645 1, 1, s->field_select[dir][1], |
1646 ref_picture, pix_op, | 1646 ref_picture, pix_op, |
1647 s->mv[dir][1][0], s->mv[dir][1][1], block_s); | 1647 s->mv[dir][1][0], s->mv[dir][1][1], block_s, mb_y); |
1648 } else { | 1648 } else { |
1649 if(s->picture_structure != s->field_select[dir][0] + 1 && s->pict_type != FF_B_TYPE && !s->first_field){ | 1649 if(s->picture_structure != s->field_select[dir][0] + 1 && s->pict_type != FF_B_TYPE && !s->first_field){ |
1650 ref_picture= s->current_picture_ptr->data; | 1650 ref_picture= s->current_picture_ptr->data; |
1651 } | 1651 } |
1652 | 1652 |
1653 mpeg_motion_lowres(s, dest_y, dest_cb, dest_cr, | 1653 mpeg_motion_lowres(s, dest_y, dest_cb, dest_cr, |
1654 0, 0, s->field_select[dir][0], | 1654 0, 0, s->field_select[dir][0], |
1655 ref_picture, pix_op, | 1655 ref_picture, pix_op, |
1656 s->mv[dir][0][0], s->mv[dir][0][1], 2*block_s); | 1656 s->mv[dir][0][0], s->mv[dir][0][1], 2*block_s, mb_y>>1); |
1657 } | 1657 } |
1658 break; | 1658 break; |
1659 case MV_TYPE_16X8: | 1659 case MV_TYPE_16X8: |
1660 for(i=0; i<2; i++){ | 1660 for(i=0; i<2; i++){ |
1661 uint8_t ** ref2picture; | 1661 uint8_t ** ref2picture; |
1667 } | 1667 } |
1668 | 1668 |
1669 mpeg_motion_lowres(s, dest_y, dest_cb, dest_cr, | 1669 mpeg_motion_lowres(s, dest_y, dest_cb, dest_cr, |
1670 0, 0, s->field_select[dir][i], | 1670 0, 0, s->field_select[dir][i], |
1671 ref2picture, pix_op, | 1671 ref2picture, pix_op, |
1672 s->mv[dir][i][0], s->mv[dir][i][1] + 2*block_s*i, block_s); | 1672 s->mv[dir][i][0], s->mv[dir][i][1] + 2*block_s*i, block_s, mb_y>>1); |
1673 | 1673 |
1674 dest_y += 2*block_s*s->linesize; | 1674 dest_y += 2*block_s*s->linesize; |
1675 dest_cb+= (2*block_s>>s->chroma_y_shift)*s->uvlinesize; | 1675 dest_cb+= (2*block_s>>s->chroma_y_shift)*s->uvlinesize; |
1676 dest_cr+= (2*block_s>>s->chroma_y_shift)*s->uvlinesize; | 1676 dest_cr+= (2*block_s>>s->chroma_y_shift)*s->uvlinesize; |
1677 } | 1677 } |
1682 int j; | 1682 int j; |
1683 for(j=0; j<2; j++){ | 1683 for(j=0; j<2; j++){ |
1684 mpeg_motion_lowres(s, dest_y, dest_cb, dest_cr, | 1684 mpeg_motion_lowres(s, dest_y, dest_cb, dest_cr, |
1685 1, j, j^i, | 1685 1, j, j^i, |
1686 ref_picture, pix_op, | 1686 ref_picture, pix_op, |
1687 s->mv[dir][2*i + j][0], s->mv[dir][2*i + j][1], block_s); | 1687 s->mv[dir][2*i + j][0], s->mv[dir][2*i + j][1], block_s, mb_y); |
1688 } | 1688 } |
1689 pix_op = s->dsp.avg_h264_chroma_pixels_tab; | 1689 pix_op = s->dsp.avg_h264_chroma_pixels_tab; |
1690 } | 1690 } |
1691 }else{ | 1691 }else{ |
1692 for(i=0; i<2; i++){ | 1692 for(i=0; i<2; i++){ |
1693 mpeg_motion_lowres(s, dest_y, dest_cb, dest_cr, | 1693 mpeg_motion_lowres(s, dest_y, dest_cb, dest_cr, |
1694 0, 0, s->picture_structure != i+1, | 1694 0, 0, s->picture_structure != i+1, |
1695 ref_picture, pix_op, | 1695 ref_picture, pix_op, |
1696 s->mv[dir][2*i][0],s->mv[dir][2*i][1],2*block_s); | 1696 s->mv[dir][2*i][0],s->mv[dir][2*i][1],2*block_s, mb_y>>1); |
1697 | 1697 |
1698 // after put we make avg of the same block | 1698 // after put we make avg of the same block |
1699 pix_op = s->dsp.avg_h264_chroma_pixels_tab; | 1699 pix_op = s->dsp.avg_h264_chroma_pixels_tab; |
1700 | 1700 |
1701 //opposite parity is always in the same frame if this is second field | 1701 //opposite parity is always in the same frame if this is second field |
2032 * @param h is the normal height, this will be reduced automatically if needed for the last row | 2032 * @param h is the normal height, this will be reduced automatically if needed for the last row |
2033 */ | 2033 */ |
2034 void ff_draw_horiz_band(MpegEncContext *s, int y, int h){ | 2034 void ff_draw_horiz_band(MpegEncContext *s, int y, int h){ |
2035 if (s->avctx->draw_horiz_band) { | 2035 if (s->avctx->draw_horiz_band) { |
2036 AVFrame *src; | 2036 AVFrame *src; |
2037 const int field_pic= s->picture_structure != PICT_FRAME; | |
2037 int offset[4]; | 2038 int offset[4]; |
2038 | 2039 |
2039 if(s->picture_structure != PICT_FRAME){ | 2040 h= FFMIN(h, (s->avctx->height>>field_pic) - y); |
2041 | |
2042 if(field_pic && !(s->avctx->slice_flags&SLICE_FLAG_ALLOW_FIELD)){ | |
2040 h <<= 1; | 2043 h <<= 1; |
2041 y <<= 1; | 2044 y <<= 1; |
2042 if(s->first_field && !(s->avctx->slice_flags&SLICE_FLAG_ALLOW_FIELD)) return; | 2045 if(s->first_field) return; |
2043 } | 2046 } |
2044 | |
2045 h= FFMIN(h, s->avctx->height - y); | |
2046 | 2047 |
2047 if(s->pict_type==FF_B_TYPE || s->low_delay || (s->avctx->slice_flags&SLICE_FLAG_CODED_ORDER)) | 2048 if(s->pict_type==FF_B_TYPE || s->low_delay || (s->avctx->slice_flags&SLICE_FLAG_CODED_ORDER)) |
2048 src= (AVFrame*)s->current_picture_ptr; | 2049 src= (AVFrame*)s->current_picture_ptr; |
2049 else if(s->last_picture_ptr) | 2050 else if(s->last_picture_ptr) |
2050 src= (AVFrame*)s->last_picture_ptr; | 2051 src= (AVFrame*)s->last_picture_ptr; |
2087 s->dest[1] = s->current_picture.data[1] + ((s->mb_x - 1) << (mb_size - s->chroma_x_shift)); | 2088 s->dest[1] = s->current_picture.data[1] + ((s->mb_x - 1) << (mb_size - s->chroma_x_shift)); |
2088 s->dest[2] = s->current_picture.data[2] + ((s->mb_x - 1) << (mb_size - s->chroma_x_shift)); | 2089 s->dest[2] = s->current_picture.data[2] + ((s->mb_x - 1) << (mb_size - s->chroma_x_shift)); |
2089 | 2090 |
2090 if(!(s->pict_type==FF_B_TYPE && s->avctx->draw_horiz_band && s->picture_structure==PICT_FRAME)) | 2091 if(!(s->pict_type==FF_B_TYPE && s->avctx->draw_horiz_band && s->picture_structure==PICT_FRAME)) |
2091 { | 2092 { |
2093 if(s->picture_structure==PICT_FRAME){ | |
2092 s->dest[0] += s->mb_y * linesize << mb_size; | 2094 s->dest[0] += s->mb_y * linesize << mb_size; |
2093 s->dest[1] += s->mb_y * uvlinesize << (mb_size - s->chroma_y_shift); | 2095 s->dest[1] += s->mb_y * uvlinesize << (mb_size - s->chroma_y_shift); |
2094 s->dest[2] += s->mb_y * uvlinesize << (mb_size - s->chroma_y_shift); | 2096 s->dest[2] += s->mb_y * uvlinesize << (mb_size - s->chroma_y_shift); |
2097 }else{ | |
2098 s->dest[0] += (s->mb_y>>1) * linesize << mb_size; | |
2099 s->dest[1] += (s->mb_y>>1) * uvlinesize << (mb_size - s->chroma_y_shift); | |
2100 s->dest[2] += (s->mb_y>>1) * uvlinesize << (mb_size - s->chroma_y_shift); | |
2101 assert((s->mb_y&1) == (s->picture_structure == PICT_BOTTOM_FIELD)); | |
2102 } | |
2095 } | 2103 } |
2096 } | 2104 } |
2097 | 2105 |
2098 void ff_mpeg_flush(AVCodecContext *avctx){ | 2106 void ff_mpeg_flush(AVCodecContext *avctx){ |
2099 int i; | 2107 int i; |