# HG changeset patch
# User michaelni
# Date 1026671855 0
# Node ID 18ad513d92fe94f81325f49c826adf0f7f2ce7b9
# Parent 26971b5a271d98a7ca14510b9e635f50a8e27ab8
direct rendering method 1 support

diff -r 26971b5a271d -r 18ad513d92fe avcodec.h
--- a/avcodec.h Sun Jul 14 18:36:24 2002 +0000
+++ b/avcodec.h Sun Jul 14 18:37:35 2002 +0000
@@ -5,8 +5,8 @@
 
 #define LIBAVCODEC_VERSION_INT 0x000406
 #define LIBAVCODEC_VERSION "0.4.6"
-#define LIBAVCODEC_BUILD       4615
-#define LIBAVCODEC_BUILD_STR   "4615"
+#define LIBAVCODEC_BUILD       4616
+#define LIBAVCODEC_BUILD_STR   "4616"
 
 enum CodecID {
     CODEC_ID_NONE,
@@ -100,12 +100,14 @@
 #define CODEC_FLAG_PASS1 0x0200 /* use internal 2pass ratecontrol in first pass mode */
 #define CODEC_FLAG_PASS2 0x0400 /* use internal 2pass ratecontrol in second pass mode */
 #define CODEC_FLAG_EXTERN_HUFF 0x1000 /* use external huffman table (for mjpeg) */
-#define CODEC_FLAG_GRAY 0x2000 /* only decode/encode grayscale */
-
+#define CODEC_FLAG_GRAY 0x2000 /* only decode/encode grayscale */
+#define CODEC_FLAG_EMU_EDGE 0x4000 /* dont draw edges */
+#define CODEC_FLAG_DR1 0x8000 /* dr1 */
 /* codec capabilities */
 
 /* decoder can use draw_horiz_band callback */
 #define CODEC_CAP_DRAW_HORIZ_BAND 0x0001
+#define CODEC_CAP_DR1 0x0002 /* direct rendering method 1 */
 
 #define FRAME_RATE_BASE 10000
@@ -248,8 +250,16 @@
 #define MBC 128
 #define MBR 96
 #endif
-    int *quant_store; /* field for communicating with external postprocessing */
+#define QP_TYPE int //FIXME note xxx this might be changed to int8_t
+
+    QP_TYPE *quant_store; /* field for communicating with external postprocessing */
     unsigned qstride;
+
+    uint8_t *dr_buffer[3];
+    int dr_stride;
+    void *dr_opaque_frame;
+    void (*get_buffer_callback)(struct AVCodecContext *c, int width, int height, int pict_type);
+
     //FIXME this should be reordered after kabis API is finished ...
     /*
         Note: Below are located reserved fields for further usage
@@ -267,13 +277,12 @@
         flt_res6,flt_res7,flt_res8,flt_res9,flt_res10,flt_res11;
     void
         *ptr_res0,*ptr_res1,*ptr_res2,*ptr_res3,*ptr_res4,*ptr_res5,
-        *ptr_res6,*ptr_res7,*ptr_res8,*ptr_res9,*ptr_res10,*ptr_res11;
+        *ptr_res6;
     unsigned long int
         ul_res0,ul_res1,ul_res2,ul_res3,ul_res4,ul_res5,
         ul_res6,ul_res7,ul_res8,ul_res9,ul_res10,ul_res11,ul_res12;
     unsigned int
-        ui_res0,ui_res1,ui_res2,ui_res3,ui_res4,ui_res5,
-        ui_res6;
+        ui_res0,ui_res1,ui_res2,ui_res3,ui_res4,ui_res5;
     unsigned short int
         us_res0,us_res1,us_res2,us_res3,us_res4,us_res5,
         us_res6,us_res7,us_res8,us_res9,us_res10,us_res11,us_res12;
diff -r 26971b5a271d -r 18ad513d92fe h263dec.c
--- a/h263dec.c Sun Jul 14 18:36:24 2002 +0000
+++ b/h263dec.c Sun Jul 14 18:37:35 2002 +0000
@@ -89,7 +89,7 @@
     }
     s->codec_id= avctx->codec->id;
     avctx->mbskip_table= s->mbskip_table;
-    
+
     /* for h263, we allocate the images after having read the header */
     if (avctx->codec->id != CODEC_ID_H263 && avctx->codec->id != CODEC_ID_MPEG4)
         if (MPV_common_init(s) < 0)
@@ -155,21 +155,22 @@
         ret = h263_decode_picture_header(s);
     }
 
+
     /* After H263 & mpeg4 header decode we have the height, width,*/
     /* and other parameters. So then we could init the picture   */
     /* FIXME: By the way H263 decoder is evolving it should have */
     /* an H263EncContext                                         */
+    if (s->width != avctx->width || s->height != avctx->height) {
+        /* H.263 could change picture size any time */
+        MPV_common_end(s);
+        s->context_initialized=0;
+    }
     if (!s->context_initialized) {
         avctx->width = s->width;
         avctx->height = s->height;
         avctx->aspect_ratio_info= s->aspect_ratio_info;
         if (MPV_common_init(s) < 0)
             return -1;
-    } else if (s->width != avctx->width || s->height != avctx->height) {
-        /* H.263 could change picture size any time */
-        MPV_common_end(s);
-        if (MPV_common_init(s) < 0)
-            return -1;
     }
     if(ret==FRAME_SKIPED)
         return buf_size;
@@ -190,7 +191,7 @@
         s->next_p_frame_damaged=0;
     }
 
-    MPV_frame_start(s);
+    MPV_frame_start(s, avctx);
 
 #ifdef DEBUG
     printf("qscale=%d\n", s->qscale);
@@ -459,7 +460,7 @@
     NULL,
     h263_decode_end,
     h263_decode_frame,
-    CODEC_CAP_DRAW_HORIZ_BAND,
+    CODEC_CAP_DRAW_HORIZ_BAND | CODEC_CAP_DR1,
 };
 
 AVCodec h263_decoder = {
@@ -471,7 +472,7 @@
     NULL,
     h263_decode_end,
     h263_decode_frame,
-    CODEC_CAP_DRAW_HORIZ_BAND,
+    CODEC_CAP_DRAW_HORIZ_BAND | CODEC_CAP_DR1,
 };
 
 AVCodec msmpeg4v1_decoder = {
@@ -483,7 +484,7 @@
     NULL,
    h263_decode_end,
     h263_decode_frame,
-    CODEC_CAP_DRAW_HORIZ_BAND,
+    CODEC_CAP_DRAW_HORIZ_BAND | CODEC_CAP_DR1,
 };
 
 AVCodec msmpeg4v2_decoder = {
@@ -495,7 +496,7 @@
     NULL,
     h263_decode_end,
     h263_decode_frame,
-    CODEC_CAP_DRAW_HORIZ_BAND,
+    CODEC_CAP_DRAW_HORIZ_BAND | CODEC_CAP_DR1,
 };
 
 AVCodec msmpeg4v3_decoder = {
@@ -507,7 +508,7 @@
     NULL,
     h263_decode_end,
     h263_decode_frame,
-    CODEC_CAP_DRAW_HORIZ_BAND,
+    CODEC_CAP_DRAW_HORIZ_BAND | CODEC_CAP_DR1,
 };
 
 AVCodec wmv1_decoder = {
@@ -519,7 +520,7 @@
     NULL,
     h263_decode_end,
     h263_decode_frame,
-    CODEC_CAP_DRAW_HORIZ_BAND,
+    CODEC_CAP_DRAW_HORIZ_BAND | CODEC_CAP_DR1,
 };
 
 AVCodec wmv2_decoder = {
@@ -531,7 +532,7 @@
     NULL,
     h263_decode_end,
     h263_decode_frame,
-    CODEC_CAP_DRAW_HORIZ_BAND,
+    CODEC_CAP_DRAW_HORIZ_BAND | CODEC_CAP_DR1,
 };
 
 AVCodec h263i_decoder = {
@@ -543,6 +544,6 @@
     NULL,
     h263_decode_end,
     h263_decode_frame,
-    CODEC_CAP_DRAW_HORIZ_BAND,
+    CODEC_CAP_DRAW_HORIZ_BAND | CODEC_CAP_DR1,
 };
 
diff -r 26971b5a271d -r 18ad513d92fe mpegvideo.c
--- a/mpegvideo.c Sun Jul 14 18:36:24 2002 +0000
+++ b/mpegvideo.c Sun Jul 14 18:37:35 2002 +0000
@@ -144,9 +144,10 @@
     s->mb_width = (s->width + 15) / 16;
     s->mb_height = (s->height + 15) / 16;
     s->mb_num = s->mb_width * s->mb_height;
-    s->linesize = s->mb_width * 16 + 2 * EDGE_WIDTH;
+    if(!(s->flags&CODEC_FLAG_DR1)){
+      s->linesize = s->mb_width * 16 + 2 * EDGE_WIDTH;
 
-    for(i=0;i<3;i++) {
+      for(i=0;i<3;i++) {
         int w, h, shift, pict_start;
         w = s->linesize;
@@ -173,8 +174,11 @@
         s->aux_picture[i] = pict + pict_start;
         if(i>0) memset(s->aux_picture_base[i], 128, c_size);
+      }
     }
 
+    CHECKED_ALLOCZ(s->edge_emu_buffer, (s->width+32)*2*17);
+
     if (s->encoding) {
         int j;
         int mv_table_size= (s->mb_width+2)*(s->mb_height+2);
@@ -267,7 +271,7 @@
     }
     /* default structure is frame */
     s->picture_structure = PICT_FRAME;
-    
+
     /* init macroblock skip table */
     CHECKED_ALLOCZ(s->mbskip_table, s->mb_num);
@@ -315,11 +319,22 @@
     av_freep(&s->bitstream_buffer);
     av_freep(&s->tex_pb_buffer);
     av_freep(&s->pb2_buffer);
+    av_freep(&s->edge_emu_buffer);
+
     for(i=0;i<3;i++) {
         int j;
-        av_freep(&s->last_picture_base[i]);
-        av_freep(&s->next_picture_base[i]);
-        av_freep(&s->aux_picture_base[i]);
+        if(!(s->flags&CODEC_FLAG_DR1)){
+            av_freep(&s->last_picture_base[i]);
+            av_freep(&s->next_picture_base[i]);
+            av_freep(&s->aux_picture_base[i]);
+        }
+        s->last_picture_base[i]=
+        s->next_picture_base[i]=
+        s->aux_picture_base [i] = NULL;
+        s->last_picture[i]=
+        s->next_picture[i]=
+        s->aux_picture [i] = NULL;
+
         for(j=0; j<REORDER_BUFFER_SIZE; j++)
             av_freep(&s->picture_buffer[j][i]);
     }
@@ -595,7 +610,7 @@
 }
 
 /* generic function for encode/decode called before a frame is coded/decoded */
-void MPV_frame_start(MpegEncContext *s)
+void MPV_frame_start(MpegEncContext *s, AVCodecContext *avctx)
 {
     int i;
     UINT8 *tmp;
@@ -603,17 +618,39 @@
     s->mb_skiped = 0;
     s->decoding_error=0;
 
+    if(avctx->flags&CODEC_FLAG_DR1){
+        int i;
+        avctx->get_buffer_callback(avctx, s->width, s->height, s->pict_type);
+
+        s->linesize= avctx->dr_stride;
+    }
+
     if (s->pict_type == B_TYPE) {
         for(i=0;i<3;i++) {
+            if(avctx->flags&CODEC_FLAG_DR1)
+                s->aux_picture[i]= avctx->dr_buffer[i];
+
             s->current_picture[i] = s->aux_picture[i];
         }
     } else {
         for(i=0;i<3;i++) {
             /* swap next and last */
-            tmp = s->last_picture[i];
+            if(avctx->flags&CODEC_FLAG_DR1)
+                tmp= avctx->dr_buffer[i];
+            else
+                tmp = s->last_picture[i];
+
             s->last_picture[i] = s->next_picture[i];
             s->next_picture[i] = tmp;
             s->current_picture[i] = tmp;
+
+            s->last_dr_opaque= s->next_dr_opaque;
+            s->next_dr_opaque= avctx->dr_opaque_frame;
+
+            if(s->has_b_frames && s->last_dr_opaque)
+                avctx->dr_opaque_frame= s->last_dr_opaque;
+            else
+                avctx->dr_opaque_frame= s->next_dr_opaque;
         }
     }
 }
@@ -624,7 +661,7 @@
 //    if((s->picture_number%100)==0 && s->encoding) printf("sads:%d //\n", sads);
 
     /* draw edge for correct motion prediction if outside */
-    if (s->pict_type != B_TYPE && !s->intra_only) {
+    if (s->pict_type != B_TYPE && !s->intra_only && !(s->flags&CODEC_FLAG_EMU_EDGE)) {
       if(s->avctx==NULL || s->avctx->codec->id!=CODEC_ID_MPEG4 || s->divx_version>=500){
         draw_edges(s->current_picture[0], s->linesize, s->mb_width*16, s->mb_height*16, EDGE_WIDTH);
         draw_edges(s->current_picture[1], s->linesize/2, s->mb_width*8, s->mb_height*8, EDGE_WIDTH/2);
@@ -776,7 +813,7 @@
     s->picture_in_gop_number= s->coded_order[0].picture_in_gop_number;
     s->picture_number= s->coded_order[0].picture_number;
 
-    MPV_frame_start(s);
+    MPV_frame_start(s, avctx);
 
     encode_picture(s, s->picture_number);
     avctx->key_frame = (s->pict_type == I_TYPE);
@@ -879,6 +916,60 @@
     return;
 }
 
+static void emulated_edge_mc(UINT8 *buf, UINT8 *src, int linesize, int block_w, int block_h,
+                                    int src_x, int src_y, int w, int h){
+    int x, y;
+    int start_y, start_x, end_y, end_x;
+
+    if(src_y>= h){
+        src+= (h-1-src_y)*linesize;
+        src_y=h-1;
+    }
+    if(src_x>= w){
+        src+= (w-1-src_x);
+        src_x=w-1;
+    }
+
+    start_y= MAX(0, -src_y);
+    start_x= MAX(0, -src_x);
+    end_y= MIN(block_h, h-src_y);
+    end_x= MIN(block_w, w-src_x);
+
+    // copy existing part
+    for(y=start_y; y
     if (s->quarter_sample) {
         motion_x>>=1;
         motion_y>>=1;
     }
@@ -909,6 +1002,15 @@
     linesize = s->linesize << field_based;
     ptr = ref_picture[0] + (src_y * linesize) + (src_x) + src_offset;
     dest_y += dest_offset;
+
+    if(s->flags&CODEC_FLAG_EMU_EDGE){
+        if(src_x<0 || src_y<0 || src_x + (motion_x&1) + 16 > s->width
+           || src_y + (motion_y&1) + h > height){
+            emulated_edge_mc(s->edge_emu_buffer, ptr, linesize, 17, h+1, src_x, src_y, s->width, height);
+            ptr= s->edge_emu_buffer;
+            emu=1;
+        }
+    }
     pix_op[dxy](dest_y, ptr, linesize, h);
     pix_op[dxy](dest_y + 8, ptr + 8, linesize, h);
@@ -941,8 +1043,17 @@
     offset = (src_y * (linesize >> 1)) + src_x + (src_offset >> 1);
     ptr = ref_picture[1] + offset;
+    if(emu){
+        emulated_edge_mc(s->edge_emu_buffer, ptr, linesize>>1, 9, (h>>1)+1, src_x, src_y, s->width>>1, height>>1);
+        ptr= s->edge_emu_buffer;
+    }
     pix_op[dxy](dest_cb + (dest_offset >> 1), ptr, linesize >> 1, h >> 1);
+
     ptr = ref_picture[2] + offset;
+    if(emu){
+        emulated_edge_mc(s->edge_emu_buffer, ptr, linesize>>1, 9, (h>>1)+1, src_x, src_y, s->width>>1, height>>1);
+        ptr= s->edge_emu_buffer;
+    }
     pix_op[dxy](dest_cr + (dest_offset >> 1), ptr, linesize >> 1, h >> 1);
 }
diff -r 26971b5a271d -r 18ad513d92fe mpegvideo.h
--- a/mpegvideo.h Sun Jul 14 18:36:24 2002 +0000
+++ b/mpegvideo.h Sun Jul 14 18:37:35 2002 +0000
@@ -143,6 +143,8 @@
     UINT8 *aux_picture[3];      /* aux picture (for B frames only) */
     UINT8 *aux_picture_base[3]; /* real start of the picture */
     UINT8 *current_picture[3];  /* buffer to store the decompressed current picture */
+    void *last_dr_opaque;
+    void *next_dr_opaque;
     int num_available_buffers;  /* is 0 at the start & after seeking, after the first I frame its 1 after next I/P 2 */
     int last_dc[3];             /* last DC values for MPEG1 */
     INT16 *dc_val[3];           /* used for mpeg4 DC prediction, all 3 arrays must be continuous */
@@ -159,6 +161,7 @@
     UINT8 *cbp_table;           /* used to store cbp, ac_pred for partitioned decoding */
     UINT8 *pred_dir_table;      /* used to store pred_dir for partitioned decoding */
     INT8 *qscale_table;         /* used to store qscale for partitioned decoding (& postprocessing FIXME export) */
+    UINT8 *edge_emu_buffer;
 
     int input_qscale;           /* qscale prior to reordering of frames */
     int input_pict_type;        /* pict_type prior to reordering of frames */
@@ -447,7 +450,7 @@
 int MPV_common_init(MpegEncContext *s);
 void MPV_common_end(MpegEncContext *s);
 void MPV_decode_mb(MpegEncContext *s, DCTELEM block[6][64]);
-void MPV_frame_start(MpegEncContext *s);
+void MPV_frame_start(MpegEncContext *s, AVCodecContext *avctx);
 void MPV_frame_end(MpegEncContext *s);
 #ifdef HAVE_MMX
 void MPV_common_init_mmx(MpegEncContext *s);
diff -r 26971b5a271d -r 18ad513d92fe rv10.c
--- a/rv10.c Sun Jul 14 18:36:24 2002 +0000
+++ b/rv10.c Sun Jul 14 18:37:35 2002 +0000
@@ -419,7 +419,7 @@
     }
 
     if (s->mb_x == 0 && s->mb_y == 0) {
-        MPV_frame_start(s);
+        MPV_frame_start(s, avctx);
     }
 #ifdef DEBUG
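
The patch only defines the decoder side of direct rendering method 1: when CODEC_FLAG_DR1 is set on a codec that advertises CODEC_CAP_DR1, MPV_frame_start() calls avctx->get_buffer_callback() before each frame and then decodes straight into avctx->dr_buffer[] using avctx->dr_stride as the luma stride (the chroma planes are addressed with dr_stride/2), and it hands the caller's avctx->dr_opaque_frame back in display order. The caller-side sketch below shows what such a callback could look like; the MyFrame structure, the per-frame malloc, the plane sizes and the 16-pixel alignment are illustrative assumptions, not part of the patch.

/* Sketch of an application-side DR1 buffer provider, written against the
 * fields this patch adds to AVCodecContext (assumed usage, not patch text). */
#include <stdlib.h>
#include "avcodec.h"

typedef struct MyFrame {
    uint8_t *plane[3]; /* Y, Cb, Cr memory owned by the application */
    int stride;        /* luma stride; chroma planes use stride/2 */
} MyFrame;

static void my_get_buffer(struct AVCodecContext *c, int width, int height, int pict_type)
{
    /* a real player would recycle a pool of frames instead of malloc per call;
     * pict_type could be used to pick a pool, it is unused in this sketch */
    MyFrame *f = malloc(sizeof(MyFrame));
    int w = (width  + 15) & ~15; /* round up to whole macroblocks (assumption) */
    int h = (height + 15) & ~15;
    (void)pict_type;

    f->stride   = w;
    f->plane[0] = malloc(w * h);             /* Y  */
    f->plane[1] = malloc((w / 2) * (h / 2)); /* Cb */
    f->plane[2] = malloc((w / 2) * (h / 2)); /* Cr */

    c->dr_buffer[0]    = f->plane[0];
    c->dr_buffer[1]    = f->plane[1];
    c->dr_buffer[2]    = f->plane[2];
    c->dr_stride       = f->stride;
    c->dr_opaque_frame = f; /* handed back, reordered across B frames, after decoding */
}

static void enable_dr1(AVCodecContext *c, AVCodec *codec)
{
    if (codec->capabilities & CODEC_CAP_DR1) {
        /* DR1 buffers carry no EDGE_WIDTH border, so edge emulation is enabled too */
        c->flags |= CODEC_FLAG_DR1 | CODEC_FLAG_EMU_EDGE;
        c->get_buffer_callback = my_get_buffer;
    }
}

Because MPV_frame_start() makes last_picture[] and next_picture[] point into buffers handed out by earlier callback invocations, the application has to keep each frame it provided alive until it can no longer serve as a reference; freeing it immediately after display would leave the decoder reading freed memory.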
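
The text of the mpegvideo.c hunk that adds emulated_edge_mc() breaks off inside the "copy existing part" loop, and the small hunk that declares the emu flag in mpeg_motion() is missing entirely. The sketch below reconstructs only the idea the visible prologue implies: copy whatever part of the requested block lies inside the picture into edge_emu_buffer and replicate the border pixels outward. It is not the literal missing patch text, and the extra clamping of positions that lie entirely outside the picture is an added safety assumption.

/* Reconstruction sketch of the edge-emulation idea (not the original hunk). */
#include <stdint.h>

#define EMU_MAX(a,b) ((a) > (b) ? (a) : (b))
#define EMU_MIN(a,b) ((a) < (b) ? (a) : (b))

static void emulate_edges(uint8_t *buf, const uint8_t *src, int linesize,
                          int block_w, int block_h, int src_x, int src_y, int w, int h)
{
    int x, y, start_x, start_y, end_x, end_y;

    /* clamp positions so that at least one source row/column stays readable
     * (the far-outside cases are a safety assumption added in this sketch) */
    if (src_y >= h)             { src += (h - 1 - src_y) * linesize;       src_y = h - 1; }
    else if (src_y <= -block_h) { src += (1 - block_h - src_y) * linesize; src_y = 1 - block_h; }
    if (src_x >= w)             { src += (w - 1 - src_x);                  src_x = w - 1; }
    else if (src_x <= -block_w) { src += (1 - block_w - src_x);            src_x = 1 - block_w; }

    start_y = EMU_MAX(0, -src_y);
    start_x = EMU_MAX(0, -src_x);
    end_y   = EMU_MIN(block_h, h - src_y);
    end_x   = EMU_MIN(block_w, w - src_x);

    /* copy the part of the block that really exists in the picture */
    for (y = start_y; y < end_y; y++)
        for (x = start_x; x < end_x; x++)
            buf[y * linesize + x] = src[y * linesize + x];

    /* replicate the first/last copied row into the rows above/below */
    for (y = 0; y < start_y; y++)
        for (x = start_x; x < end_x; x++)
            buf[y * linesize + x] = buf[start_y * linesize + x];
    for (y = end_y; y < block_h; y++)
        for (x = start_x; x < end_x; x++)
            buf[y * linesize + x] = buf[(end_y - 1) * linesize + x];

    /* replicate the left/right columns over the full height, filling the corners */
    for (y = 0; y < block_h; y++) {
        for (x = 0; x < start_x; x++)
            buf[y * linesize + x] = buf[y * linesize + start_x];
        for (x = end_x; x < block_w; x++)
            buf[y * linesize + x] = buf[y * linesize + end_x - 1];
    }
}

mpeg_motion() then points ptr at edge_emu_buffer whenever CODEC_FLAG_EMU_EDGE is set and the 17x(h+1) luma block (9x(h/2+1) for chroma) would reach outside the picture, which is why MPV_common_init() allocates edge_emu_buffer with (s->width+32)*2*17 bytes.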