# HG changeset patch
# User cehoyos
# Date 1232443716 0
# Node ID 2d7269e13a8defaf27f8a77d573f6f4e1eb7d9a7
# Parent ffb82d54ecdc59e798302307ffd28f7c73579c27
Add VDPAU hardware-accelerated decoding for WMV3 and VC1, which can be used
by video players.

Original patch by NVIDIA Corporation.

diff -r ffb82d54ecdc -r 2d7269e13a8d Makefile
--- a/Makefile	Mon Jan 19 23:41:46 2009 +0000
+++ b/Makefile	Tue Jan 20 09:28:36 2009 +0000
@@ -218,6 +218,7 @@
 OBJS-$(CONFIG_ULTI_DECODER)            += ulti.o
 OBJS-$(CONFIG_VB_DECODER)              += vb.o
 OBJS-$(CONFIG_VC1_DECODER)             += vc1.o vc1data.o vc1dsp.o msmpeg4data.o h263dec.o h263.o intrax8.o intrax8dsp.o error_resilience.o mpegvideo.o
+OBJS-$(CONFIG_VC1_VDPAU_DECODER)       += vdpauvideo.o vc1.o vc1data.o vc1dsp.o msmpeg4data.o h263dec.o h263.o intrax8.o intrax8dsp.o error_resilience.o mpegvideo.o
 OBJS-$(CONFIG_VCR1_DECODER)            += vcr1.o
 OBJS-$(CONFIG_VCR1_ENCODER)            += vcr1.o
 OBJS-$(CONFIG_VMDAUDIO_DECODER)        += vmdav.o
@@ -241,6 +242,7 @@
 OBJS-$(CONFIG_WMV2_DECODER)            += wmv2dec.o wmv2.o msmpeg4.o msmpeg4data.o h263dec.o h263.o intrax8.o intrax8dsp.o mpeg12data.o mpegvideo.o error_resilience.o
 OBJS-$(CONFIG_WMV2_ENCODER)            += wmv2enc.o wmv2.o msmpeg4.o msmpeg4data.o mpegvideo_enc.o motion_est.o ratecontrol.o h263.o mpeg12data.o mpegvideo.o error_resilience.o
 OBJS-$(CONFIG_WMV3_DECODER)            += vc1.o vc1data.o vc1dsp.o msmpeg4data.o h263dec.o h263.o intrax8.o intrax8dsp.o error_resilience.o mpegvideo.o
+OBJS-$(CONFIG_WMV3_VDPAU_DECODER)      += vdpauvideo.o vc1.o vc1data.o vc1dsp.o msmpeg4data.o h263dec.o h263.o intrax8.o intrax8dsp.o error_resilience.o mpegvideo.o
 OBJS-$(CONFIG_WNV1_DECODER)            += wnv1.o
 OBJS-$(CONFIG_WS_SND1_DECODER)         += ws-snd1.o
 OBJS-$(CONFIG_XAN_DPCM_DECODER)        += dpcm.o
diff -r ffb82d54ecdc -r 2d7269e13a8d allcodecs.c
--- a/allcodecs.c	Mon Jan 19 23:41:46 2009 +0000
+++ b/allcodecs.c	Tue Jan 20 09:28:36 2009 +0000
@@ -157,6 +157,7 @@
     REGISTER_DECODER (ULTI, ulti);
     REGISTER_DECODER (VB, vb);
     REGISTER_DECODER (VC1, vc1);
+    REGISTER_DECODER (VC1_VDPAU, vc1_vdpau);
     REGISTER_DECODER (VCR1, vcr1);
     REGISTER_DECODER (VMDVIDEO, vmdvideo);
     REGISTER_DECODER (VMNC, vmnc);
@@ -169,6 +170,7 @@
     REGISTER_ENCDEC (WMV1, wmv1);
     REGISTER_ENCDEC (WMV2, wmv2);
     REGISTER_DECODER (WMV3, wmv3);
+    REGISTER_DECODER (WMV3_VDPAU, wmv3_vdpau);
     REGISTER_DECODER (WNV1, wnv1);
     REGISTER_DECODER (XAN_WC3, xan_wc3);
     REGISTER_DECODER (XL, xl);
diff -r ffb82d54ecdc -r 2d7269e13a8d imgconvert.c
--- a/imgconvert.c	Mon Jan 19 23:41:46 2009 +0000
+++ b/imgconvert.c	Tue Jan 20 09:28:36 2009 +0000
@@ -276,6 +276,12 @@
     [PIX_FMT_VDPAU_H264] = {
         .name = "vdpau_h264",
     },
+    [PIX_FMT_VDPAU_WMV3] = {
+        .name = "vdpau_wmv3",
+    },
+    [PIX_FMT_VDPAU_VC1] = {
+        .name = "vdpau_vc1",
+    },
     [PIX_FMT_UYYVYY411] = {
         .name = "uyyvyy411",
         .nb_channels = 1,
diff -r ffb82d54ecdc -r 2d7269e13a8d vc1.c
--- a/vc1.c	Mon Jan 19 23:41:46 2009 +0000
+++ b/vc1.c	Tue Jan 20 09:28:36 2009 +0000
@@ -35,6 +35,7 @@
 #include "unary.h"
 #include "simple_idct.h"
 #include "mathops.h"
+#include "vdpau_internal.h"
 
 #undef NDEBUG
 #include <assert.h>
@@ -4130,6 +4131,7 @@
     MpegEncContext *s = &v->s;
     AVFrame *pict = data;
     uint8_t *buf2 = NULL;
+    const uint8_t *buf_vdpau = buf;
 
     /* no supplementary picture */
     if (buf_size == 0) {
@@ -4151,6 +4153,13 @@
         s->current_picture_ptr= &s->picture[i];
     }
 
+    if (s->avctx->codec->capabilities&CODEC_CAP_HWACCEL_VDPAU){
+        if (v->profile < PROFILE_ADVANCED)
+            avctx->pix_fmt = PIX_FMT_VDPAU_WMV3;
+        else
+            avctx->pix_fmt = PIX_FMT_VDPAU_VC1;
+    }
+
     //for advanced profile we may need to parse and unescape data
     if (avctx->codec_id == CODEC_ID_VC1) {
         int buf_size2 = 0;
@@ -4167,6 +4176,8 @@
                 if(size <= 0) continue;
                 switch(AV_RB32(start)){
                 case VC1_CODE_FRAME:
+                    if (s->avctx->codec->capabilities&CODEC_CAP_HWACCEL_VDPAU)
+                        buf_vdpau = start;
                     buf_size2 = vc1_unescape_buffer(start + 4, size, buf2);
                     break;
                 case VC1_CODE_ENTRYPOINT: /* it should be before frame data */
@@ -4255,6 +4266,10 @@
     s->me.qpel_put= s->dsp.put_qpel_pixels_tab;
     s->me.qpel_avg= s->dsp.avg_qpel_pixels_tab;
 
+    if ((CONFIG_VC1_VDPAU_DECODER || CONFIG_WMV3_VDPAU_DECODER)
+        &&s->avctx->codec->capabilities&CODEC_CAP_HWACCEL_VDPAU)
+        ff_vdpau_vc1_decode_picture(s, buf_vdpau, (buf + buf_size) - buf_vdpau);
+    else {
     ff_er_frame_start(s);
 
     v->bits = buf_size * 8;
@@ -4263,6 +4278,7 @@
 //  if(get_bits_count(&s->gb) > buf_size * 8)
 //      return -1;
     ff_er_frame_end(s);
+    }
 
     MPV_frame_end(s);
 
@@ -4336,3 +4352,35 @@
     NULL,
     .long_name = NULL_IF_CONFIG_SMALL("Windows Media Video 9"),
 };
+
+#if CONFIG_WMV3_VDPAU_DECODER
+AVCodec wmv3_vdpau_decoder = {
+    "wmv3_vdpau",
+    CODEC_TYPE_VIDEO,
+    CODEC_ID_WMV3,
+    sizeof(VC1Context),
+    vc1_decode_init,
+    NULL,
+    vc1_decode_end,
+    vc1_decode_frame,
+    CODEC_CAP_DR1 | CODEC_CAP_DELAY | CODEC_CAP_HWACCEL_VDPAU,
+    NULL,
+    .long_name = NULL_IF_CONFIG_SMALL("Windows Media Video 9 VDPAU"),
+};
+#endif
+
+#if CONFIG_VC1_VDPAU_DECODER
+AVCodec vc1_vdpau_decoder = {
+    "vc1_vdpau",
+    CODEC_TYPE_VIDEO,
+    CODEC_ID_VC1,
+    sizeof(VC1Context),
+    vc1_decode_init,
+    NULL,
+    vc1_decode_end,
+    vc1_decode_frame,
+    CODEC_CAP_DR1 | CODEC_CAP_DELAY | CODEC_CAP_HWACCEL_VDPAU,
+    NULL,
+    .long_name = NULL_IF_CONFIG_SMALL("SMPTE VC-1 VDPAU"),
+};
+#endif
diff -r ffb82d54ecdc -r 2d7269e13a8d vdpau.h
--- a/vdpau.h	Mon Jan 19 23:41:46 2009 +0000
+++ b/vdpau.h	Tue Jan 20 09:28:36 2009 +0000
@@ -72,6 +72,7 @@
     union VdpPictureInfo {
         VdpPictureInfoMPEG1Or2 mpeg;
         VdpPictureInfoH264     h264;
+        VdpPictureInfoVC1      vc1;
     } info;
     /** Describe size/location of the compressed video data.
     */
diff -r ffb82d54ecdc -r 2d7269e13a8d vdpau_internal.h
--- a/vdpau_internal.h	Mon Jan 19 23:41:46 2009 +0000
+++ b/vdpau_internal.h	Tue Jan 20 09:28:36 2009 +0000
@@ -36,4 +36,7 @@
 void ff_vdpau_h264_set_reference_frames(MpegEncContext *s);
 void ff_vdpau_h264_picture_complete(MpegEncContext *s);
 
+void ff_vdpau_vc1_decode_picture(MpegEncContext *s, const uint8_t *buf,
+                                 int buf_size);
+
 #endif /* AVCODEC_VDPAU_INTERNAL_H */
diff -r ffb82d54ecdc -r 2d7269e13a8d vdpauvideo.c
--- a/vdpauvideo.c	Mon Jan 19 23:41:46 2009 +0000
+++ b/vdpauvideo.c	Tue Jan 20 09:28:36 2009 +0000
@@ -24,6 +24,7 @@
 #include <limits.h>
 #include "avcodec.h"
 #include "h264.h"
+#include "vc1.h"
 
 #undef NDEBUG
 #include <assert.h>
@@ -232,4 +233,73 @@
     render->bitstream_buffers_used = 0;
 }
 
+void ff_vdpau_vc1_decode_picture(MpegEncContext *s, const uint8_t *buf,
+                                 int buf_size)
+{
+    VC1Context *v = s->avctx->priv_data;
+    struct vdpau_render_state * render, * last, * next;
+
+    render = (struct vdpau_render_state*)s->current_picture.data[0];
+    assert(render);
+
+    /* fill VdpPictureInfoVC1 struct */
+    render->info.vc1.frame_coding_mode  = v->fcm;
+    render->info.vc1.postprocflag       = v->postprocflag;
+    render->info.vc1.pulldown           = v->broadcast;
+    render->info.vc1.interlace          = v->interlace;
+    render->info.vc1.tfcntrflag         = v->tfcntrflag;
+    render->info.vc1.finterpflag        = v->finterpflag;
+    render->info.vc1.psf                = v->psf;
+    render->info.vc1.dquant             = v->dquant;
+    render->info.vc1.panscan_flag       = v->panscanflag;
+    render->info.vc1.refdist_flag       = v->refdist_flag;
+    render->info.vc1.quantizer          = v->quantizer_mode;
+    render->info.vc1.extended_mv        = v->extended_mv;
+    render->info.vc1.extended_dmv       = v->extended_dmv;
+    render->info.vc1.overlap            = v->overlap;
+    render->info.vc1.vstransform       = v->vstransform;
+    render->info.vc1.loopfilter         = v->s.loop_filter;
+    render->info.vc1.fastuvmc           = v->fastuvmc;
+    render->info.vc1.range_mapy_flag    = v->range_mapy_flag;
+    render->info.vc1.range_mapy         = v->range_mapy;
+    render->info.vc1.range_mapuv_flag   = v->range_mapuv_flag;
+    render->info.vc1.range_mapuv        = v->range_mapuv;
+    /* Specific to simple/main profile only */
+    render->info.vc1.multires           = v->multires;
+    render->info.vc1.syncmarker         = v->s.resync_marker;
+    render->info.vc1.rangered           = v->rangered;
+    render->info.vc1.maxbframes         = v->s.max_b_frames;
+
+    render->info.vc1.deblockEnable      = v->postprocflag & 1;
+    render->info.vc1.pquant             = v->pq;
+
+    render->info.vc1.forward_reference  = VDP_INVALID_HANDLE;
+    render->info.vc1.backward_reference = VDP_INVALID_HANDLE;
+
+    if (v->bi_type)
+        render->info.vc1.picture_type = 4;
+    else
+        render->info.vc1.picture_type = s->pict_type - 1 + s->pict_type / 3;
+
+    switch(s->pict_type){
+    case FF_B_TYPE:
+        next = (struct vdpau_render_state*)s->next_picture.data[0];
+        assert(next);
+        render->info.vc1.backward_reference = next->surface;
+        // no break here, going to set forward prediction
+    case FF_P_TYPE:
+        last = (struct vdpau_render_state*)s->last_picture.data[0];
+        if (!last) // FIXME: Does this test make sense?
+            last = render; // predict second field from the first
+        render->info.vc1.forward_reference = last->surface;
+    }
+
+    ff_vdpau_add_data_chunk(s, buf, buf_size);
+
+    render->info.vc1.slice_count = 1;
+
+    ff_draw_horiz_band(s, 0, s->avctx->height);
+    render->bitstream_buffers_used = 0;
+}
+
 /* @}*/
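A note on how a player is expected to consume these decoders: for frames in the new
PIX_FMT_VDPAU_WMV3/PIX_FMT_VDPAU_VC1 formats, data[0] does not hold pixel data but the
struct vdpau_render_state that ff_vdpau_vc1_decode_picture() filled in. Below is a minimal
player-side sketch, not part of the patch: it assumes the player has already created a
VdpDecoder, obtained the VdpDecoderRender entry point via VdpGetProcAddress, and installed a
get_buffer() callback that places a render state with a valid surface in data[0]. The function
name player_render_vc1_frame and both of its first parameters are hypothetical.

#include <vdpau/vdpau.h>
#include "avcodec.h"
#include "vdpau.h"

/* Hypothetical player-side helper: submit one decoded WMV3/VC-1 picture to
 * the VDPAU hardware decoder. "decoder" and "vdp_decoder_render" are assumed
 * to come from the player's own VDPAU setup. */
static void player_render_vc1_frame(VdpDecoder decoder,
                                    VdpDecoderRender *vdp_decoder_render,
                                    const AVFrame *frame)
{
    /* With the VDPAU pixel formats, data[0] is the render state prepared by
     * ff_vdpau_vc1_decode_picture(), not image data. */
    struct vdpau_render_state *render =
        (struct vdpau_render_state *)frame->data[0];

    /* render->info.vc1 holds the picture parameters filled in by this patch;
     * the bitstream chunks were queued by ff_vdpau_add_data_chunk(). */
    vdp_decoder_render(decoder, render->surface,
                       (VdpPictureInfo const *)&render->info,
                       render->bitstream_buffers_used,
                       render->bitstream_buffers);
}

In practice a player would more likely do this from the AVCodecContext.draw_horiz_band
callback, since ff_vdpau_vc1_decode_picture() calls ff_draw_horiz_band() once the picture's
data has been queued, but the render-state handling is the same either way.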