libavcodec: comparison of alsdec.c @ 10681:997692df50c1

Read and decode block data in separate functions to prepare support for multi-channel correlation mode.
author thilo.borgmann
date Sun, 13 Dec 2009 15:40:26 +0000
parents 95f3daa991a2
children fadd5dfac0f0
comparing 10680:d569841bd1b7 with 10681:997692df50c1
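
For orientation, here is a minimal sketch of the call flow this patch introduces, distilled from the decode_blocks_ind() hunk further down (buffer setup and error handling abbreviated); ALSBlockData, read_block(), decode_block() and read_decode_block() are all defined in this changeset:

    ALSBlockData bd;
    memset(&bd, 0, sizeof(ALSBlockData));

    // per-channel state is borrowed from the decoder context
    bd.ra_block         = ra_frame;
    bd.use_ltp          = ctx->use_ltp;
    bd.ltp_lag          = ctx->ltp_lag;
    bd.ltp_gain         = ctx->ltp_gain[0];
    bd.quant_cof        = ctx->quant_cof;
    bd.lpc_cof          = ctx->lpc_cof;
    bd.prev_raw_samples = ctx->prev_raw_samples;
    bd.raw_samples      = ctx->raw_samples[c];

    for (b = 0; b < ctx->num_blocks; b++) {
        bd.shift_lsbs   = 0;
        bd.block_length = div_blocks[b];

        // read_block() parses the bitstream into bd,
        // decode_block() reconstructs the samples from it
        if (read_decode_block(ctx, &bd))
            return -1;                  // damaged block

        bd.raw_samples += div_blocks[b];
        bd.ra_block     = 0;
    }

Keeping the parsing step and the reconstruction step separate is what should allow a later multi-channel correlation mode to read the block data of all channels first and decode them afterwards.
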
@@ -149,16 +149,40 @@
     unsigned int cur_frame_length; ///< length of the current frame to decode
     unsigned int frame_id;         ///< the frame ID / number of the current frame
     unsigned int js_switch;        ///< if true, joint-stereo decoding is enforced
     unsigned int num_blocks;       ///< number of blocks used in the current frame
     int ltp_lag_length;            ///< number of bits used for ltp lag value
+    int *use_ltp;                  ///< contains use_ltp flags for all channels
+    int *ltp_lag;                  ///< contains ltp lag values for all channels
+    int **ltp_gain;                ///< gain values for ltp 5-tap filter for a channel
+    int *ltp_gain_buffer;          ///< contains all gain values for ltp 5-tap filter
     int32_t *quant_cof;            ///< quantized parcor coefficients
     int32_t *lpc_cof;              ///< coefficients of the direct form prediction filter
     int32_t *prev_raw_samples;     ///< contains unshifted raw samples from the previous block
     int32_t **raw_samples;         ///< decoded raw samples for each channel
     int32_t *raw_buffer;           ///< contains all decoded raw samples including carryover samples
 } ALSDecContext;
+
+
+typedef struct {
+    unsigned int block_length;     ///< number of samples within the block
+    unsigned int ra_block;         ///< if true, this is a random access block
+    int const_block;               ///< if true, this is a constant value block
+    int32_t const_val;             ///< the sample value of a constant block
+    int js_blocks;                 ///< true if this block contains a difference signal
+    unsigned int shift_lsbs;       ///< shift of values for this block
+    unsigned int opt_order;        ///< prediction order of this block
+    int store_prev_samples;        ///< if true, carryover samples have to be stored
+    int *use_ltp;                  ///< if true, long-term prediction is used
+    int *ltp_lag;                  ///< lag value for long-term prediction
+    int *ltp_gain;                 ///< gain values for ltp 5-tap filter
+    int32_t *quant_cof;            ///< quantized parcor coefficients
+    int32_t *lpc_cof;              ///< coefficients of the direct form prediction
+    int32_t *raw_samples;          ///< decoded raw samples / residuals for this block
+    int32_t *prev_raw_samples;     ///< contains unshifted raw samples from the previous block
+    int32_t *raw_other;            ///< decoded raw samples of the other channel of a channel pair
+} ALSBlockData;
 
 
 static av_cold void dprint_specific_config(ALSDecContext *ctx)
 {
 #ifdef DEBUG
@@ -467,61 +491,70 @@
 }
 
 
 /** Reads the block data for a constant block
  */
-static void read_const_block(ALSDecContext *ctx, int32_t *raw_samples,
-                             unsigned int block_length, unsigned int *js_blocks)
+static void read_const_block_data(ALSDecContext *ctx, ALSBlockData *bd)
 {
     ALSSpecificConfig *sconf = &ctx->sconf;
     AVCodecContext *avctx    = ctx->avctx;
     GetBitContext *gb        = &ctx->gb;
-    int32_t const_val = 0;
-    unsigned int const_block, k;
-
-    const_block = get_bits1(gb);    // 1 = constant value, 0 = zero block (silence)
-    *js_blocks  = get_bits1(gb);
+
+    bd->const_val   = 0;
+    bd->const_block = get_bits1(gb);    // 1 = constant value, 0 = zero block (silence)
+    bd->js_blocks   = get_bits1(gb);
 
     // skip 5 reserved bits
     skip_bits(gb, 5);
 
-    if (const_block) {
+    if (bd->const_block) {
         unsigned int const_val_bits = sconf->floating ? 24 : avctx->bits_per_raw_sample;
-        const_val = get_sbits_long(gb, const_val_bits);
+        bd->const_val = get_sbits_long(gb, const_val_bits);
     }
+
+    // ensure constant block decoding by reusing this field
+    bd->const_block = 1;
+}
+
+
+/** Decodes the block data for a constant block
+ */
+static void decode_const_block_data(ALSDecContext *ctx, ALSBlockData *bd)
+{
+    int      smp = bd->block_length;
+    int32_t  val = bd->const_val;
+    int32_t *dst = bd->raw_samples;
 
     // write raw samples into buffer
-    for (k = 0; k < block_length; k++)
-        raw_samples[k] = const_val;
+    for (; smp; smp--)
+        *dst++ = val;
 }
 
 
 /** Reads the block data for a non-constant block
  */
-static int read_var_block(ALSDecContext *ctx, unsigned int ra_block,
-                          int32_t *raw_samples, unsigned int block_length,
-                          unsigned int *js_blocks, int32_t *raw_other,
-                          unsigned int *shift_lsbs)
+static int read_var_block_data(ALSDecContext *ctx, ALSBlockData *bd)
 {
     ALSSpecificConfig *sconf = &ctx->sconf;
     AVCodecContext *avctx    = ctx->avctx;
     GetBitContext *gb        = &ctx->gb;
     unsigned int k;
     unsigned int s[8];
     unsigned int sub_blocks, log2_sub_blocks, sb_length;
-    unsigned int opt_order  = 1;
-    int32_t      *quant_cof = ctx->quant_cof;
-    int32_t      *lpc_cof   = ctx->lpc_cof;
     unsigned int start      = 0;
-    int          smp        = 0;
-    int          sb, store_prev_samples;
-    int64_t      y;
-    int          use_ltp    = 0;
-    int          ltp_lag    = 0;
-    int          ltp_gain[5];
-
-    *js_blocks  = get_bits1(gb);
+    unsigned int opt_order;
+    int          sb;
+    int32_t      *quant_cof = bd->quant_cof;
+
+
+    // ensure variable block decoding by reusing this field
+    bd->const_block = 0;
+
+    bd->opt_order = 1;
+    bd->js_blocks = get_bits1(gb);
+
+    opt_order = bd->opt_order;
 
     // determine the number of subblocks for entropy decoding
     if (!sconf->bgmc && !sconf->sb_part) {
         log2_sub_blocks = 0;
     } else {
@@ -533,17 +566,17 @@
 
     sub_blocks = 1 << log2_sub_blocks;
 
     // do not continue in case of a damaged stream since
     // block_length must be evenly divisible by sub_blocks
-    if (block_length & (sub_blocks - 1)) {
+    if (bd->block_length & (sub_blocks - 1)) {
         av_log(avctx, AV_LOG_WARNING,
                "Block length is not evenly divisible by the number of subblocks.\n");
         return -1;
     }
 
-    sb_length = block_length >> log2_sub_blocks;
+    sb_length = bd->block_length >> log2_sub_blocks;
 
 
     if (sconf->bgmc) {
         // TODO: BGMC mode
     } else {
@@ -551,23 +584,25 @@
         for (k = 1; k < sub_blocks; k++)
             s[k] = s[k - 1] + decode_rice(gb, 0);
     }
 
     if (get_bits1(gb))
-        *shift_lsbs = get_bits(gb, 4) + 1;
+        bd->shift_lsbs = get_bits(gb, 4) + 1;
 
-    store_prev_samples = (*js_blocks && raw_other) || *shift_lsbs;
+    bd->store_prev_samples = (bd->js_blocks && bd->raw_other) || bd->shift_lsbs;
 
 
     if (!sconf->rlslms) {
         if (sconf->adapt_order) {
-            int opt_order_length = av_ceil_log2(av_clip((block_length >> 3) - 1,
+            int opt_order_length = av_ceil_log2(av_clip((bd->block_length >> 3) - 1,
                                                 2, sconf->max_order + 1));
-            opt_order = get_bits(gb, opt_order_length);
+            bd->opt_order = get_bits(gb, opt_order_length);
         } else {
-            opt_order = sconf->max_order;
+            bd->opt_order = sconf->max_order;
         }
+
+        opt_order = bd->opt_order;
 
         if (opt_order) {
             int add_base;
 
             if (sconf->coef_table == 3) {
@@ -615,71 +650,93 @@
         }
     }
 
     // read LTP gain and lag values
     if (sconf->long_term_prediction) {
-        use_ltp = get_bits1(gb);
+        *bd->use_ltp = get_bits1(gb);
 
-        if (use_ltp) {
-            ltp_gain[0]   = decode_rice(gb, 1) << 3;
-            ltp_gain[1]   = decode_rice(gb, 2) << 3;
+        if (*bd->use_ltp) {
+            bd->ltp_gain[0]   = decode_rice(gb, 1) << 3;
+            bd->ltp_gain[1]   = decode_rice(gb, 2) << 3;
 
-            ltp_gain[2]   = ltp_gain_values[get_unary(gb, 0, 4)][get_bits(gb, 2)];
+            bd->ltp_gain[2]   = ltp_gain_values[get_unary(gb, 0, 4)][get_bits(gb, 2)];
 
-            ltp_gain[3]   = decode_rice(gb, 2) << 3;
-            ltp_gain[4]   = decode_rice(gb, 1) << 3;
+            bd->ltp_gain[3]   = decode_rice(gb, 2) << 3;
+            bd->ltp_gain[4]   = decode_rice(gb, 1) << 3;
 
-            ltp_lag       = get_bits(gb, ctx->ltp_lag_length);
-            ltp_lag      += FFMAX(4, opt_order + 1);
+            *bd->ltp_lag  = get_bits(gb, ctx->ltp_lag_length);
+            *bd->ltp_lag += FFMAX(4, opt_order + 1);
         }
     }
 
     // read first value and residuals in case of a random access block
-    if (ra_block) {
+    if (bd->ra_block) {
         if (opt_order)
-            raw_samples[0] = decode_rice(gb, avctx->bits_per_raw_sample - 4);
+            bd->raw_samples[0] = decode_rice(gb, avctx->bits_per_raw_sample - 4);
         if (opt_order > 1)
-            raw_samples[1] = decode_rice(gb, s[0] + 3);
+            bd->raw_samples[1] = decode_rice(gb, s[0] + 3);
         if (opt_order > 2)
-            raw_samples[2] = decode_rice(gb, s[0] + 1);
+            bd->raw_samples[2] = decode_rice(gb, s[0] + 1);
 
         start = FFMIN(opt_order, 3);
     }
 
     // read all residuals
     if (sconf->bgmc) {
         // TODO: BGMC mode
     } else {
-        int32_t *current_res = raw_samples + start;
+        int32_t *current_res = bd->raw_samples + start;
 
         for (sb = 0; sb < sub_blocks; sb++, start = 0)
             for (; start < sb_length; start++)
                 *current_res++ = decode_rice(gb, s[sb]);
    }
 
+    if (!sconf->mc_coding || ctx->js_switch)
+        align_get_bits(gb);
+
+    return 0;
+}
+
+
+/** Decodes the block data for a non-constant block
+ */
+static int decode_var_block_data(ALSDecContext *ctx, ALSBlockData *bd)
+{
+    ALSSpecificConfig *sconf = &ctx->sconf;
+    unsigned int block_length = bd->block_length;
+    unsigned int smp = 0;
+    unsigned int k;
+    unsigned int opt_order = bd->opt_order;
+    int sb;
+    int64_t y;
+    int32_t *quant_cof = bd->quant_cof;
+    int32_t *lpc_cof   = bd->lpc_cof;
+    int32_t *raw_samples = bd->raw_samples;
+
     // reverse long-term prediction
-    if (use_ltp) {
+    if (*bd->use_ltp) {
         int ltp_smp;
 
-        for (ltp_smp = FFMAX(ltp_lag - 2, 0); ltp_smp < block_length; ltp_smp++) {
-            int center = ltp_smp - ltp_lag;
+        for (ltp_smp = FFMAX(*bd->ltp_lag - 2, 0); ltp_smp < block_length; ltp_smp++) {
+            int center = ltp_smp - *bd->ltp_lag;
             int begin  = FFMAX(0, center - 2);
             int end    = center + 3;
             int tab    = 5 - (end - begin);
             int base;
 
             y = 1 << 6;
 
             for (base = begin; base < end; base++, tab++)
-                y += MUL64(ltp_gain[tab], raw_samples[base]);
+                y += MUL64(bd->ltp_gain[tab], raw_samples[base]);
 
             raw_samples[ltp_smp] += y >> 7;
         }
     }
 
     // reconstruct all samples from residuals
-    if (ra_block) {
+    if (bd->ra_block) {
         for (smp = 0; smp < opt_order; smp++) {
             y = 1 << 19;
 
             for (sb = 0; sb < smp; sb++)
                 y += MUL64(lpc_cof[sb],raw_samples[smp - (sb + 1)]);
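
As a side note, the reversed long-term prediction in the hunk above boils down to adding a rounded, 7-bit-scaled 5-tap prediction taken around raw_samples[n - lag]; a compact restatement of what the loop computes (the tap window is clipped at the start of the block, hence the FFMAX/begin handling):

    // for every n >= lag - 2 inside the block:
    //
    //   raw_samples[n] += ( (1 << 6)
    //                     + sum over i = -2..2 of ltp_gain[i + 2] * raw_samples[n - lag + i]
    //                     ) >> 7;
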
@@ -690,85 +747,109 @@
     } else {
         for (k = 0; k < opt_order; k++)
             parcor_to_lpc(k, quant_cof, lpc_cof);
 
         // store previous samples in case that they have to be altered
-        if (store_prev_samples)
-            memcpy(ctx->prev_raw_samples, raw_samples - sconf->max_order,
-                   sizeof(*ctx->prev_raw_samples) * sconf->max_order);
+        if (bd->store_prev_samples)
+            memcpy(bd->prev_raw_samples, raw_samples - sconf->max_order,
+                   sizeof(*bd->prev_raw_samples) * sconf->max_order);
 
         // reconstruct difference signal for prediction (joint-stereo)
-        if (*js_blocks && raw_other) {
+        if (bd->js_blocks && bd->raw_other) {
             int32_t *left, *right;
 
-            if (raw_other > raw_samples) {      // D = R - L
+            if (bd->raw_other > raw_samples) {  // D = R - L
                 left  = raw_samples;
-                right = raw_other;
+                right = bd->raw_other;
             } else {                            // D = R - L
-                left  = raw_other;
+                left  = bd->raw_other;
                 right = raw_samples;
             }
 
             for (sb = -1; sb >= -sconf->max_order; sb--)
                 raw_samples[sb] = right[sb] - left[sb];
         }
 
         // reconstruct shifted signal
-        if (*shift_lsbs)
+        if (bd->shift_lsbs)
             for (sb = -1; sb >= -sconf->max_order; sb--)
-                raw_samples[sb] >>= *shift_lsbs;
+                raw_samples[sb] >>= bd->shift_lsbs;
     }
 
     // reconstruct raw samples
-    for (; smp < block_length; smp++) {
+    for (; smp < bd->block_length; smp++) {
         y = 1 << 19;
 
         for (sb = 0; sb < opt_order; sb++)
-            y += MUL64(lpc_cof[sb],raw_samples[smp - (sb + 1)]);
+            y += MUL64(bd->lpc_cof[sb],raw_samples[smp - (sb + 1)]);
 
         raw_samples[smp] -= y >> 20;
     }
 
     // restore previous samples in case that they have been altered
-    if (store_prev_samples)
-        memcpy(raw_samples - sconf->max_order, ctx->prev_raw_samples,
+    if (bd->store_prev_samples)
+        memcpy(raw_samples - sconf->max_order, bd->prev_raw_samples,
                sizeof(*raw_samples) * sconf->max_order);
 
     return 0;
 }
 
 
 /** Reads the block data.
  */
-static int read_block_data(ALSDecContext *ctx, unsigned int ra_block,
-                           int32_t *raw_samples, unsigned int block_length,
-                           unsigned int *js_blocks, int32_t *raw_other)
-{
-    ALSSpecificConfig *sconf = &ctx->sconf;
+static int read_block(ALSDecContext *ctx, ALSBlockData *bd)
+{
     GetBitContext *gb = &ctx->gb;
-    unsigned int shift_lsbs = 0;
-    unsigned int k;
 
     // read block type flag and read the samples accordingly
     if (get_bits1(gb)) {
-        if (read_var_block(ctx, ra_block, raw_samples, block_length, js_blocks,
-                           raw_other, &shift_lsbs))
+        if (read_var_block_data(ctx, bd))
            return -1;
     } else {
-        read_const_block(ctx, raw_samples, block_length, js_blocks);
+        read_const_block_data(ctx, bd);
    }
+
+    return 0;
+}
+
+
+/** Decodes the block data.
+ */
+static int decode_block(ALSDecContext *ctx, ALSBlockData *bd)
+{
+    unsigned int smp;
+
+    // read block type flag and read the samples accordingly
+    if (bd->const_block)
+        decode_const_block_data(ctx, bd);
+    else if (decode_var_block_data(ctx, bd))
+        return -1;
 
     // TODO: read RLSLMS extension data
 
-    if (!sconf->mc_coding || ctx->js_switch)
-        align_get_bits(gb);
-
-    if (shift_lsbs)
-        for (k = 0; k < block_length; k++)
-            raw_samples[k] <<= shift_lsbs;
+    if (bd->shift_lsbs)
+        for (smp = 0; smp < bd->block_length; smp++)
+            bd->raw_samples[smp] <<= bd->shift_lsbs;
 
     return 0;
+}
+
+
+/** Reads and decodes block data successively.
+ */
+static int read_decode_block(ALSDecContext *ctx, ALSBlockData *bd)
+{
+    int ret;
+
+    ret = read_block(ctx, bd);
+
+    if (ret)
+        return ret;
+
+    ret = decode_block(ctx, bd);
+
+    return ret;
 }
 
 
 /** Computes the number of samples left to decode for the current frame and
  *  sets these samples to zero.
@@ -790,23 +871,36 @@
  */
 static int decode_blocks_ind(ALSDecContext *ctx, unsigned int ra_frame,
                              unsigned int c, const unsigned int *div_blocks,
                              unsigned int *js_blocks)
 {
-    int32_t *raw_sample;
     unsigned int b;
-    raw_sample = ctx->raw_samples[c];
+    ALSBlockData bd;
+
+    memset(&bd, 0, sizeof(ALSBlockData));
+
+    bd.ra_block         = ra_frame;
+    bd.use_ltp          = ctx->use_ltp;
+    bd.ltp_lag          = ctx->ltp_lag;
+    bd.ltp_gain         = ctx->ltp_gain[0];
+    bd.quant_cof        = ctx->quant_cof;
+    bd.lpc_cof          = ctx->lpc_cof;
+    bd.prev_raw_samples = ctx->prev_raw_samples;
+    bd.raw_samples      = ctx->raw_samples[c];
+
 
     for (b = 0; b < ctx->num_blocks; b++) {
-        if (read_block_data(ctx, ra_frame, raw_sample,
-                            div_blocks[b], &js_blocks[0], NULL)) {
+        bd.shift_lsbs   = 0;
+        bd.block_length = div_blocks[b];
+
+        if (read_decode_block(ctx, &bd)) {
             // damaged block, write zero for the rest of the frame
-            zero_remaining(b, ctx->num_blocks, div_blocks, raw_sample);
+            zero_remaining(b, ctx->num_blocks, div_blocks, bd.raw_samples);
             return -1;
         }
-        raw_sample += div_blocks[b];
-        ra_frame = 0;
+        bd.raw_samples += div_blocks[b];
+        bd.ra_block = 0;
     }
 
     return 0;
 }
 
@@ -817,43 +911,71 @@
                          unsigned int c, const unsigned int *div_blocks,
                          unsigned int *js_blocks)
 {
     ALSSpecificConfig *sconf = &ctx->sconf;
     unsigned int offset = 0;
-    int32_t *raw_samples_R;
-    int32_t *raw_samples_L;
     unsigned int b;
+    ALSBlockData bd[2];
+
+    memset(bd, 0, 2 * sizeof(ALSBlockData));
+
+    bd[0].ra_block         = ra_frame;
+    bd[0].use_ltp          = ctx->use_ltp;
+    bd[0].ltp_lag          = ctx->ltp_lag;
+    bd[0].ltp_gain         = ctx->ltp_gain[0];
+    bd[0].quant_cof        = ctx->quant_cof;
+    bd[0].lpc_cof          = ctx->lpc_cof;
+    bd[0].prev_raw_samples = ctx->prev_raw_samples;
+    bd[0].js_blocks        = *js_blocks;
+
+    bd[1].ra_block         = ra_frame;
+    bd[1].use_ltp          = ctx->use_ltp;
+    bd[1].ltp_lag          = ctx->ltp_lag;
+    bd[1].ltp_gain         = ctx->ltp_gain[0];
+    bd[1].quant_cof        = ctx->quant_cof;
+    bd[1].lpc_cof          = ctx->lpc_cof;
+    bd[1].prev_raw_samples = ctx->prev_raw_samples;
+    bd[1].js_blocks        = *(js_blocks + 1);
 
     // decode all blocks
     for (b = 0; b < ctx->num_blocks; b++) {
         unsigned int s;
-        raw_samples_L = ctx->raw_samples[c    ] + offset;
-        raw_samples_R = ctx->raw_samples[c + 1] + offset;
-        if (read_block_data(ctx, ra_frame, raw_samples_L, div_blocks[b],
-                            &js_blocks[0], raw_samples_R) ||
-            read_block_data(ctx, ra_frame, raw_samples_R, div_blocks[b],
-                            &js_blocks[1], raw_samples_L)) {
+
+        bd[0].shift_lsbs = 0;
+        bd[1].shift_lsbs = 0;
+
+        bd[0].block_length = div_blocks[b];
+        bd[1].block_length = div_blocks[b];
+
+        bd[0].raw_samples = ctx->raw_samples[c    ] + offset;
+        bd[1].raw_samples = ctx->raw_samples[c + 1] + offset;
+
+        bd[0].raw_other   = bd[1].raw_samples;
+        bd[1].raw_other   = bd[0].raw_samples;
+
+        if(read_decode_block(ctx, &bd[0]) || read_decode_block(ctx, &bd[1])) {
             // damaged block, write zero for the rest of the frame
-            zero_remaining(b, ctx->num_blocks, div_blocks, raw_samples_L);
-            zero_remaining(b, ctx->num_blocks, div_blocks, raw_samples_R);
+            zero_remaining(b, ctx->num_blocks, div_blocks, bd[0].raw_samples);
+            zero_remaining(b, ctx->num_blocks, div_blocks, bd[1].raw_samples);
             return -1;
         }
 
         // reconstruct joint-stereo blocks
-        if (js_blocks[0]) {
-            if (js_blocks[1])
+        if (bd[0].js_blocks) {
+            if (bd[1].js_blocks)
                 av_log(ctx->avctx, AV_LOG_WARNING, "Invalid channel pair!\n");
 
             for (s = 0; s < div_blocks[b]; s++)
-                raw_samples_L[s] = raw_samples_R[s] - raw_samples_L[s];
-        } else if (js_blocks[1]) {
+                bd[0].raw_samples[s] = bd[1].raw_samples[s] - bd[0].raw_samples[s];
+        } else if (bd[1].js_blocks) {
             for (s = 0; s < div_blocks[b]; s++)
-                raw_samples_R[s] = raw_samples_R[s] + raw_samples_L[s];
+                bd[1].raw_samples[s] = bd[1].raw_samples[s] + bd[0].raw_samples[s];
         }
 
         offset += div_blocks[b];
-        ra_frame = 0;
+        bd[0].ra_block = 0;
+        bd[1].ra_block = 0;
     }
 
     // store carryover raw samples,
     // the others channel raw samples are stored by the calling function.
     memmove(ctx->raw_samples[c] - sconf->max_order,
@@ -1013,10 +1135,14 @@
 {
     ALSDecContext *ctx = avctx->priv_data;
 
     av_freep(&ctx->sconf.chan_pos);
 
+    av_freep(&ctx->use_ltp);
+    av_freep(&ctx->ltp_lag);
+    av_freep(&ctx->ltp_gain);
+    av_freep(&ctx->ltp_gain_buffer);
     av_freep(&ctx->quant_cof);
     av_freep(&ctx->lpc_cof);
     av_freep(&ctx->prev_raw_samples);
     av_freep(&ctx->raw_samples);
     av_freep(&ctx->raw_buffer);
@@ -1029,10 +1155,11 @@
  */
 static av_cold int decode_init(AVCodecContext *avctx)
 {
     unsigned int c;
     unsigned int channel_size;
+    int num_buffers;
     ALSDecContext *ctx = avctx->priv_data;
     ALSSpecificConfig *sconf = &ctx->sconf;
     ctx->avctx = avctx;
 
     if (!avctx->extradata) {
@@ -1061,10 +1188,30 @@
     }
 
     // set lag value for long-term prediction
     ctx->ltp_lag_length = 8 + (avctx->sample_rate >= 96000) +
                              (avctx->sample_rate >= 192000);
+
+    // allocate quantized parcor coefficient buffer
+    num_buffers = sconf->mc_coding ? avctx->channels : 1;
+
+    // allocate and assign lag and gain data buffer for ltp mode
+    ctx->use_ltp         = av_mallocz(sizeof(*ctx->use_ltp)  * num_buffers);
+    ctx->ltp_lag         = av_malloc (sizeof(*ctx->ltp_lag)  * num_buffers);
+    ctx->ltp_gain        = av_malloc (sizeof(*ctx->ltp_gain) * num_buffers);
+    ctx->ltp_gain_buffer = av_malloc (sizeof(*ctx->ltp_gain_buffer) *
+                                      num_buffers * 5);
+
+    if (!ctx->use_ltp  || !ctx->ltp_lag ||
+        !ctx->ltp_gain || !ctx->ltp_gain_buffer) {
+        av_log(avctx, AV_LOG_ERROR, "Allocating buffer memory failed.\n");
+        decode_end(avctx);
+        return AVERROR(ENOMEM);
+    }
+
+    for (c = 0; c < num_buffers; c++)
+        ctx->ltp_gain[c] = ctx->ltp_gain_buffer + c * 5;
 
     avctx->frame_size = sconf->frame_length;
     channel_size      = sconf->frame_length + sconf->max_order;
 
     ctx->prev_raw_samples = av_malloc (sizeof(*ctx->prev_raw_samples) * sconf->max_order);
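
A note on the allocation pattern at the end of the last hunk: ltp_gain is a table of pointers into the flat ltp_gain_buffer, five gains (one per tap of the LTP filter) per buffer, with one buffer per channel when multi-channel coding is enabled and a single shared buffer otherwise. Roughly:

    /* after decode_init():
     *   ctx->ltp_gain_buffer holds num_buffers * 5 gains, laid out buffer by buffer;
     *   ctx->ltp_gain[c] == ctx->ltp_gain_buffer + c * 5, so a block descriptor's
     *   bd->ltp_gain can simply point at the five gains it needs. */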