shithub: libvpx

ref: 8b71b8a6de7a5e9537556f0424f76d41e8a4236d
parent: dc12e6c0dc21b63ceedd8e33b936177a552417ad
parent: dca8ad178c41ccb185c0240281f4096726359689
author: John Koleszar <jkoleszar@google.com>
date: Tue Apr 2 17:49:03 EDT 2013

Merge "Renaming sb32_coded and sb64_coded fields." into experimental

--- a/vp9/common/vp9_onyxc_int.h
+++ b/vp9/common/vp9_onyxc_int.h
@@ -271,8 +271,8 @@
   vp9_prob prob_intra_coded;
   vp9_prob prob_last_coded;
   vp9_prob prob_gf_coded;
-  vp9_prob sb32_coded;
-  vp9_prob sb64_coded;
+  vp9_prob prob_sb32_coded;
+  vp9_prob prob_sb64_coded;
 
   // Context probabilities when using predictive coding of segment id
   vp9_prob segment_pred_probs[PREDICTION_PROBS];
--- a/vp9/decoder/vp9_decodframe.c
+++ b/vp9/decoder/vp9_decodframe.c
@@ -992,7 +992,7 @@
 
   for (mb_col = pc->cur_tile_mb_col_start;
        mb_col < pc->cur_tile_mb_col_end; mb_col += 4) {
-    if (vp9_read(bc, pc->sb64_coded)) {
+    if (vp9_read(bc, pc->prob_sb64_coded)) {
 #ifdef DEC_DEBUG
       dec_debug = (pc->current_video_frame == 11 && pc->show_frame &&
                    mb_row == 8 && mb_col == 0);
@@ -1018,7 +1018,7 @@
 
         xd->sb_index = j;
 
-        if (vp9_read(bc, pc->sb32_coded)) {
+        if (vp9_read(bc, pc->prob_sb32_coded)) {
 #ifdef DEC_DEBUG
           dec_debug = (pc->current_video_frame == 11 && pc->show_frame &&
                        mb_row + y_idx_sb == 8 && mb_col + x_idx_sb == 0);
@@ -1696,8 +1696,8 @@
     }
   }
 
-  pc->sb64_coded = vp9_read_prob(&header_bc);
-  pc->sb32_coded = vp9_read_prob(&header_bc);
+  pc->prob_sb64_coded = vp9_read_prob(&header_bc);
+  pc->prob_sb32_coded = vp9_read_prob(&header_bc);
   xd->lossless = vp9_read_bit(&header_bc);
   if (xd->lossless) {
     pc->txfm_mode = ONLY_4X4;
--- a/vp9/encoder/vp9_bitstream.c
+++ b/vp9/encoder/vp9_bitstream.c
@@ -1690,7 +1690,7 @@
     m = m_ptr;
     for (mb_col = c->cur_tile_mb_col_start;
          mb_col < c->cur_tile_mb_col_end; mb_col += 4, m += 4) {
-      vp9_write(bc, m->mbmi.sb_type == BLOCK_SIZE_SB64X64, c->sb64_coded);
+      vp9_write(bc, m->mbmi.sb_type == BLOCK_SIZE_SB64X64, c->prob_sb64_coded);
       if (m->mbmi.sb_type == BLOCK_SIZE_SB64X64) {
         write_modes_b(cpi, m, bc, tok, tok_end, mb_row, mb_col);
       } else {
@@ -1704,7 +1704,7 @@
               mb_row + y_idx_sb >= c->mb_rows)
             continue;
 
-          vp9_write(bc, sb_m->mbmi.sb_type, c->sb32_coded);
+          vp9_write(bc, sb_m->mbmi.sb_type, c->prob_sb32_coded);
           if (sb_m->mbmi.sb_type) {
             assert(sb_m->mbmi.sb_type == BLOCK_SIZE_SB32X32);
             write_modes_b(cpi, sb_m, bc, tok, tok_end,
@@ -2494,10 +2494,10 @@
     }
   }
 
-  pc->sb64_coded = get_binary_prob(cpi->sb64_count[0], cpi->sb64_count[1]);
-  vp9_write_literal(&header_bc, pc->sb64_coded, 8);
-  pc->sb32_coded = get_binary_prob(cpi->sb32_count[0], cpi->sb32_count[1]);
-  vp9_write_literal(&header_bc, pc->sb32_coded, 8);
+  pc->prob_sb64_coded = get_binary_prob(cpi->sb64_count[0], cpi->sb64_count[1]);
+  vp9_write_literal(&header_bc, pc->prob_sb64_coded, 8);
+  pc->prob_sb32_coded = get_binary_prob(cpi->sb32_count[0], cpi->sb32_count[1]);
+  vp9_write_literal(&header_bc, pc->prob_sb32_coded, 8);
 
   vp9_write_bit(&header_bc, cpi->mb.e_mbd.lossless);
   if (cpi->mb.e_mbd.lossless) {
--- a/vp9/encoder/vp9_encodeframe.c
+++ b/vp9/encoder/vp9_encodeframe.c
@@ -1086,7 +1086,7 @@
       splitmodes_used = pick_mb_modes(cpi, mb_row + y_idx, mb_col + x_idx,
                                       tp, &mb_rate, &mb_dist);
 
-      mb_rate += vp9_cost_bit(cm->sb32_coded, 0);
+      mb_rate += vp9_cost_bit(cm->prob_sb32_coded, 0);
 
       if (cpi->sf.splitmode_breakout) {
         sb32_skip = splitmodes_used;
@@ -1099,7 +1099,7 @@
         /* Pick a mode assuming that it applies to all 4 of the MBs in the SB */
         pick_sb_modes(cpi, mb_row + y_idx, mb_col + x_idx,
                       tp, &sb_rate, &sb_dist);
-        sb_rate += vp9_cost_bit(cm->sb32_coded, 1);
+        sb_rate += vp9_cost_bit(cm->prob_sb32_coded, 1);
       }
 
       /* Decide whether to encode as a SB or 4xMBs */
@@ -1131,13 +1131,13 @@
 
     memcpy(cm->above_context + mb_col, &a, sizeof(a));
     memcpy(cm->left_context, &l, sizeof(l));
-    sb32_rate += vp9_cost_bit(cm->sb64_coded, 0);
+    sb32_rate += vp9_cost_bit(cm->prob_sb64_coded, 0);
 
     if (!sb64_skip &&
         !(((cm->mb_cols & 3) && mb_col + 3 >= cm->mb_cols) ||
           ((cm->mb_rows & 3) && mb_row + 3 >= cm->mb_rows))) {
       pick_sb64_modes(cpi, mb_row, mb_col, tp, &sb64_rate, &sb64_dist);
-      sb64_rate += vp9_cost_bit(cm->sb64_coded, 1);
+      sb64_rate += vp9_cost_bit(cm->prob_sb64_coded, 1);
     }
 
     /* Decide whether to encode as a SB or 4xMBs */
--- a/vp9/encoder/vp9_onyx_if.c
+++ b/vp9/encoder/vp9_onyx_if.c
@@ -1374,8 +1374,8 @@
   cm->prob_last_coded               = 128;
   cm->prob_gf_coded                 = 128;
   cm->prob_intra_coded              = 63;
-  cm->sb32_coded                    = 200;
-  cm->sb64_coded                    = 200;
+  cm->prob_sb32_coded               = 200;
+  cm->prob_sb64_coded               = 200;
   for (i = 0; i < COMP_PRED_CONTEXTS; i++)
     cm->prob_comppred[i]         = 128;
   for (i = 0; i < TX_SIZE_MAX_SB - 1; i++)