comparison: huffyuv.c @ 4682:95875881b85b (libavcodec)

huffyuv rgb encoder

author:    lorenm
date:      Sun, 18 Mar 2007 12:03:29 +0000
parents:   ce643a22f049
children:  0f596a49ba3d
comparing: 4681:b82cff2e2e83 with 4682:95875881b85b

--- huffyuv.c (4681:b82cff2e2e83)
+++ huffyuv.c (4682:95875881b85b)
@@ -204,10 +204,33 @@
         s->dsp.diff_bytes(dst+16, src+16, src+15, w-16);
         return src[w-1];
     }
 }
 
+static inline void sub_left_prediction_bgr32(HYuvContext *s, uint8_t *dst, uint8_t *src, int w, int *red, int *green, int *blue){
+    int i;
+    int r,g,b;
+    r= *red;
+    g= *green;
+    b= *blue;
+    for(i=0; i<FFMIN(w,4); i++){
+        const int rt= src[i*4+R];
+        const int gt= src[i*4+G];
+        const int bt= src[i*4+B];
+        dst[i*4+R]= rt - r;
+        dst[i*4+G]= gt - g;
+        dst[i*4+B]= bt - b;
+        r = rt;
+        g = gt;
+        b = bt;
+    }
+    s->dsp.diff_bytes(dst+16, src+16, src+12, w*4-16);
+    *red= src[(w-1)*4+R];
+    *green= src[(w-1)*4+G];
+    *blue= src[(w-1)*4+B];
+}
+
 static void read_len_table(uint8_t *dst, GetBitContext *gb){
     int i, val, repeat;
 
     for(i=0; i<256;){
         repeat= get_bits(gb, 3);
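Note: the new sub_left_prediction_bgr32() applies left prediction per channel on packed 32-bit pixels. The first FFMIN(w,4) pixels (16 bytes) are differenced in scalar C so that diff_bytes() can then subtract, from every remaining byte, the byte one pixel (4 bytes) to its left (src+12 versus dst+16). The stand-alone sketch below is not part of this changeset; it uses placeholder channel offsets (R, G and B are endian-dependent macros in huffyuv.c) and only illustrates the inverse reconstruction a decoder has to perform:

    /* Sketch only: undo per-channel left prediction on one packed
     * 4-byte-per-pixel row.  dst holds residuals and is reconstructed in
     * place; *red, *green and *blue carry the prediction across calls. */
    #include <stdint.h>

    #define B 0   /* placeholder offsets; endian-dependent in huffyuv.c */
    #define G 1
    #define R 2

    static void add_left_prediction_bgr32_sketch(uint8_t *dst, int w,
                                                 int *red, int *green, int *blue)
    {
        int i, r = *red, g = *green, b = *blue;
        for (i = 0; i < w; i++) {
            b = dst[i*4 + B] = (dst[i*4 + B] + b) & 0xff;
            g = dst[i*4 + G] = (dst[i*4 + G] + g) & 0xff;
            r = dst[i*4 + R] = (dst[i*4 + R] + r) & 0xff;
        }
        *red   = r;
        *green = g;
        *blue  = b;
    }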
@@ -358,11 +381,13 @@
     if(s->bitstream_bpp<24){
         for(i=0; i<3; i++){
             s->temp[i]= av_malloc(s->width + 16);
         }
     }else{
-        s->temp[0]= av_malloc(4*s->width + 16);
+        for(i=0; i<2; i++){
+            s->temp[i]= av_malloc(4*s->width + 16);
+        }
     }
 }
 
 static int common_init(AVCodecContext *avctx){
     HYuvContext *s = avctx->priv_data;
@@ -519,10 +544,13 @@
         s->bitstream_bpp= 12;
         break;
     case PIX_FMT_YUV422P:
         s->bitstream_bpp= 16;
         break;
+    case PIX_FMT_RGB32:
+        s->bitstream_bpp= 24;
+        break;
     default:
         av_log(avctx, AV_LOG_ERROR, "format not supported\n");
         return -1;
     }
     avctx->bits_per_sample= s->bitstream_bpp;
@@ -548,11 +576,16 @@
         }
         if(s->interlaced != ( s->height > 288 ))
             av_log(avctx, AV_LOG_INFO, "using huffyuv 2.2.0 or newer interlacing flag\n");
     }
 
-    ((uint8_t*)avctx->extradata)[0]= s->predictor;
+    if(s->bitstream_bpp>=24 && s->predictor==MEDIAN){
+        av_log(avctx, AV_LOG_ERROR, "Error: RGB is incompatible with median predictor\n");
+        return -1;
+    }
+
+    ((uint8_t*)avctx->extradata)[0]= s->predictor | (s->decorrelate << 6);
     ((uint8_t*)avctx->extradata)[1]= s->bitstream_bpp;
     ((uint8_t*)avctx->extradata)[2]= s->interlaced ? 0x10 : 0x20;
     if(s->context)
         ((uint8_t*)avctx->extradata)[2]|= 0x40;
     ((uint8_t*)avctx->extradata)[3]= 0;
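Note: the first extradata byte now packs the decorrelate flag into bit 6 next to the predictor, which the RGB path relies on; bytes 1 to 3 are unchanged (bits per pixel, the interlace/context flag byte, and a zero pad). The following sketch is only an illustration of how the receiving side can split that method byte again, mirroring the packing written above:

    /* Sketch only: split the method byte written into extradata[0] above. */
    static void split_method_byte(unsigned char method, int *predictor, int *decorrelate)
    {
        *decorrelate = (method >> 6) & 1;  /* bit 6: decorrelate flag  */
        *predictor   =  method & 0x3f;     /* low bits: predictor      */
    }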
@@ -757,10 +790,52 @@
         }
     }
 }
 
 #ifdef CONFIG_DECODERS
+static int encode_bgr_bitstream(HYuvContext *s, int count){
+    int i;
+
+    if(s->pb.buf_end - s->pb.buf - (put_bits_count(&s->pb)>>3) < 3*4*count){
+        av_log(s->avctx, AV_LOG_ERROR, "encoded frame too large\n");
+        return -1;
+    }
+
+    if((s->flags&CODEC_FLAG_PASS1) && (s->avctx->flags2&CODEC_FLAG2_NO_OUTPUT)){
+        for(i=0; i<count; i++){
+            int g= s->temp[0][4*i+G];
+            int b= (s->temp[0][4*i+B] - g) & 0xff;
+            int r= (s->temp[0][4*i+R] - g) & 0xff;
+            s->stats[0][b]++;
+            s->stats[1][g]++;
+            s->stats[2][r]++;
+        }
+    }else if(s->context || (s->flags&CODEC_FLAG_PASS1)){
+        for(i=0; i<count; i++){
+            int g= s->temp[0][4*i+G];
+            int b= (s->temp[0][4*i+B] - g) & 0xff;
+            int r= (s->temp[0][4*i+R] - g) & 0xff;
+            s->stats[0][b]++;
+            s->stats[1][g]++;
+            s->stats[2][r]++;
+            put_bits(&s->pb, s->len[1][g], s->bits[1][g]);
+            put_bits(&s->pb, s->len[0][b], s->bits[0][b]);
+            put_bits(&s->pb, s->len[2][r], s->bits[2][r]);
+        }
+    }else{
+        for(i=0; i<count; i++){
+            int g= s->temp[0][4*i+G];
+            int b= (s->temp[0][4*i+B] - g) & 0xff;
+            int r= (s->temp[0][4*i+R] - g) & 0xff;
+            put_bits(&s->pb, s->len[1][g], s->bits[1][g]);
+            put_bits(&s->pb, s->len[0][b], s->bits[0][b]);
+            put_bits(&s->pb, s->len[2][r], s->bits[2][r]);
+        }
+    }
+    return 0;
+}
+
 static void draw_slice(HYuvContext *s, int y){
     int h, cy;
     int offset[4];
 
     if(s->avctx->draw_horiz_band==NULL)
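Note: encode_bgr_bitstream() codes each pixel as green plus the two green-decorrelated residuals b-g and r-g (modulo 256), each channel using its own VLC table (table 1 for g, 0 for b-g, 2 for r-g). The first branch only gathers statistics when pass 1 runs with CODEC_FLAG2_NO_OUTPUT, the second both counts and writes (context model or two-pass), and the third only writes. A minimal sketch of the decorrelation and its inverse, for illustration only and not part of this changeset:

    /* Sketch only: huffyuv-style green decorrelation of one RGB pixel and
     * its inverse; all arithmetic is modulo 256. */
    #include <stdint.h>

    static void decorrelate_rgb(uint8_t r, uint8_t g, uint8_t b,
                                uint8_t *cg, uint8_t *cb, uint8_t *cr)
    {
        *cg = g;                  /* green is coded directly */
        *cb = (uint8_t)(b - g);   /* blue residual           */
        *cr = (uint8_t)(r - g);   /* red residual            */
    }

    static void recorrelate_rgb(uint8_t cg, uint8_t cb, uint8_t cr,
                                uint8_t *r, uint8_t *g, uint8_t *b)
    {
        *g = cg;
        *b = (uint8_t)(cb + cg);
        *r = (uint8_t)(cr + cg);
    }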
@@ -1187,10 +1262,35 @@
                 }
 
                 encode_422_bitstream(s, width);
             }
         }
+    }else if(avctx->pix_fmt == PIX_FMT_RGB32){
+        uint8_t *data = p->data[0] + (height-1)*p->linesize[0];
+        const int stride = -p->linesize[0];
+        const int fake_stride = -fake_ystride;
+        int y;
+        int leftr, leftg, leftb;
+
+        put_bits(&s->pb, 8, leftr= data[R]);
+        put_bits(&s->pb, 8, leftg= data[G]);
+        put_bits(&s->pb, 8, leftb= data[B]);
+        put_bits(&s->pb, 8, 0);
+
+        sub_left_prediction_bgr32(s, s->temp[0], data+4, width-1, &leftr, &leftg, &leftb);
+        encode_bgr_bitstream(s, width-1);
+
+        for(y=1; y<s->height; y++){
+            uint8_t *dst = data + y*stride;
+            if(s->predictor == PLANE && s->interlaced < y){
+                s->dsp.diff_bytes(s->temp[1], dst, dst - fake_stride, width*4);
+                sub_left_prediction_bgr32(s, s->temp[0], s->temp[1], width, &leftr, &leftg, &leftb);
+            }else{
+                sub_left_prediction_bgr32(s, s->temp[0], dst, width, &leftr, &leftg, &leftb);
+            }
+            encode_bgr_bitstream(s, width);
+        }
     }else{
         av_log(avctx, AV_LOG_ERROR, "Format not supported!\n");
     }
     emms_c();
 
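Note: the RGB32 branch walks the picture bottom-up, matching the bottom-up row order of RGB in the huffyuv bitstream: data points at the last row and the stride is negated, so coded row y lies y lines above the bottom. The first pixel of the bottom row is sent raw (R, G, B plus a zero pad byte), which is why the first call covers only width-1 pixels; later rows optionally use plane prediction by first differencing against the previously coded row (dst - fake_stride) into s->temp[1]. A small sketch of the bottom-up row addressing, for illustration only:

    /* Sketch only: address rows bottom-up with a negated stride, as the
     * RGB32 branch does.  Row 0 is the bottom line of the frame. */
    #include <stdint.h>
    #include <stddef.h>

    static uint8_t *bottom_up_row(uint8_t *plane, int linesize, int height, int y)
    {
        uint8_t  *bottom = plane + (ptrdiff_t)(height - 1) * linesize;
        ptrdiff_t stride = -(ptrdiff_t)linesize;
        return bottom + y * stride;   /* y lines above the bottom row */
    }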
@@ -1271,20 +1371,20 @@
     CODEC_ID_HUFFYUV,
     sizeof(HYuvContext),
     encode_init,
     encode_frame,
     encode_end,
-    .pix_fmts= (enum PixelFormat[]){PIX_FMT_YUV422P, -1},
+    .pix_fmts= (enum PixelFormat[]){PIX_FMT_YUV422P, PIX_FMT_RGB32, -1},
 };
 
 AVCodec ffvhuff_encoder = {
     "ffvhuff",
     CODEC_TYPE_VIDEO,
     CODEC_ID_FFVHUFF,
     sizeof(HYuvContext),
     encode_init,
     encode_frame,
     encode_end,
-    .pix_fmts= (enum PixelFormat[]){PIX_FMT_YUV420P, PIX_FMT_YUV422P, -1},
+    .pix_fmts= (enum PixelFormat[]){PIX_FMT_YUV420P, PIX_FMT_YUV422P, PIX_FMT_RGB32, -1},
 };
 
 #endif //CONFIG_ENCODERS