shithub: libvpx

ref: 9ed874713fc231c2dee2dd3535ae05cd28395377
parent: 2578b767c372f3d1b4c8c8d0c1461c1ec58e9544
author: Scott LaVarnway <slavarnway@google.com>
date: Thu Mar 8 10:27:41 EST 2012

threading.c refactoring

Added recon_above/recon_left buffer pointers to MACROBLOCKD
Reworked decode_macroblock in threading.c to use them

Change-Id: I9c26870af75797134f410acbd02942065b3495c1
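
For orientation, a minimal sketch (not part of the patch) of the per-row setup the change introduces, assuming the loop filter is disabled so the reconstruction pointers aim directly into the destination frame; when filtering is on, threading.c instead points them at the pbi->mt_yabove_row/mt_uabove_row/mt_vabove_row and mt_yleft_col/mt_uleft_col/mt_vleft_col scratch buffers with a left stride of 1. The helper name setup_row_recon_pointers is hypothetical.

/* Hypothetical helper summarizing the per-row setup added by this patch
 * (compare the decodframe.c hunk below).  Index 0 is Y, 1 is U, 2 is V. */
static void setup_row_recon_pointers(MACROBLOCKD *xd,
                                     unsigned char *dst_buffer[3],
                                     int recon_yoffset, int recon_uvoffset)
{
    /* Pixel row directly above the current macroblock row. */
    xd->recon_above[0] = dst_buffer[0] + recon_yoffset  - xd->dst.y_stride;
    xd->recon_above[1] = dst_buffer[1] + recon_uvoffset - xd->dst.uv_stride;
    xd->recon_above[2] = dst_buffer[2] + recon_uvoffset - xd->dst.uv_stride;

    /* Pixel column directly to the left of the current macroblock. */
    xd->recon_left[0] = dst_buffer[0] + recon_yoffset  - 1;
    xd->recon_left[1] = dst_buffer[1] + recon_uvoffset - 1;
    xd->recon_left[2] = dst_buffer[2] + recon_uvoffset - 1;

    /* Left-column pixels are one stride apart when read from the frame. */
    xd->recon_left_stride[0] = xd->dst.y_stride;
    xd->recon_left_stride[1] = xd->dst.uv_stride;   /* shared by U and V */
}

After each macroblock the above/left pointers advance by 16 pixels for Y and 8 for U/V, so the intra predictors in both decodframe.c and threading.c can be called with the same arguments whether the above/left reconstruction comes from the frame itself or from the loop-filter scratch rows.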

--- a/vp8/common/blockd.h
+++ b/vp8/common/blockd.h
@@ -220,6 +220,10 @@
     int up_available;
     int left_available;
 
+    unsigned char *recon_above[3];
+    unsigned char *recon_left[3];
+    int recon_left_stride[2];
+
     /* Y,U,V,Y2 */
     ENTROPY_CONTEXT_PLANES *above_context;
     ENTROPY_CONTEXT_PLANES *left_context;
--- a/vp8/decoder/decodframe.c
+++ b/vp8/decoder/decodframe.c
@@ -153,24 +153,23 @@
     }
 #endif
 
-
     /* do prediction */
     if (xd->mode_info_context->mbmi.ref_frame == INTRA_FRAME)
     {
         vp8_build_intra_predictors_mbuv_s(xd,
-                                          xd->dst.u_buffer - xd->dst.uv_stride,
-                                          xd->dst.v_buffer - xd->dst.uv_stride,
-                                          xd->dst.u_buffer - 1,
-                                          xd->dst.v_buffer - 1,
-                                          xd->dst.uv_stride,
+                                          xd->recon_above[1],
+                                          xd->recon_above[2],
+                                          xd->recon_left[1],
+                                          xd->recon_left[2],
+                                          xd->recon_left_stride[1],
                                           xd->dst.u_buffer, xd->dst.v_buffer);
 
         if (mode != B_PRED)
         {
             vp8_build_intra_predictors_mby_s(xd,
-                                                 xd->dst.y_buffer - xd->dst.y_stride,
-                                                 xd->dst.y_buffer - 1,
-                                                 xd->dst.y_stride,
+                                                 xd->recon_above[0],
+                                                 xd->recon_left[0],
+                                                 xd->recon_left_stride[0],
                                                  xd->dst.y_buffer);
         }
         else
@@ -183,7 +182,7 @@
             if(xd->mode_info_context->mbmi.mb_skip_coeff)
                 vpx_memset(xd->eobs, 0, 25);
 
-            intra_prediction_down_copy(xd, xd->dst.y_buffer - dst_stride + 16);
+            intra_prediction_down_copy(xd, xd->recon_above[0] + 16);
 
             for (i = 0; i < 16; i++)
             {
@@ -383,6 +382,22 @@
         xd->mb_to_top_edge = -((mb_row * 16)) << 3;
         xd->mb_to_bottom_edge = ((pc->mb_rows - 1 - mb_row) * 16) << 3;
 
+        xd->recon_above[0] = dst_buffer[0] + recon_yoffset;
+        xd->recon_above[1] = dst_buffer[1] + recon_uvoffset;
+        xd->recon_above[2] = dst_buffer[2] + recon_uvoffset;
+
+        xd->recon_left[0] = xd->recon_above[0] - 1;
+        xd->recon_left[1] = xd->recon_above[1] - 1;
+        xd->recon_left[2] = xd->recon_above[2] - 1;
+
+        xd->recon_above[0] -= xd->dst.y_stride;
+        xd->recon_above[1] -= xd->dst.uv_stride;
+        xd->recon_above[2] -= xd->dst.uv_stride;
+
+        //TODO: move to outside row loop
+        xd->recon_left_stride[0] = xd->dst.y_stride;
+        xd->recon_left_stride[1] = xd->dst.uv_stride;
+
         for (mb_col = 0; mb_col < pc->mb_cols; mb_col++)
         {
             /* Distance of Mb to the various image edges.
@@ -436,6 +451,14 @@
 
             /* check if the boolean decoder has suffered an error */
             xd->corrupted |= vp8dx_bool_error(xd->current_bc);
+
+            xd->recon_above[0] += 16;
+            xd->recon_above[1] += 8;
+            xd->recon_above[2] += 8;
+            xd->recon_left[0] += 16;
+            xd->recon_left[1] += 8;
+            xd->recon_left[2] += 8;
+
 
             recon_yoffset += 16;
             recon_uvoffset += 8;
--- a/vp8/decoder/threading.c
+++ b/vp8/decoder/threading.c
@@ -82,11 +82,13 @@
         pbi->mt_current_mb_col[i]=-1;
 }
 
-
-static void decode_macroblock(VP8D_COMP *pbi, MACROBLOCKD *xd, int mb_row, int mb_col)
+static void decode_macroblock(VP8D_COMP *pbi, MACROBLOCKD *xd, unsigned int mb_idx)
 {
-    int throw_residual = 0;
+    MB_PREDICTION_MODE mode;
     int i;
+#if CONFIG_ERROR_CONCEALMENT
+    int corruption_detected = 0;
+#endif
 
     if (xd->mode_info_context->mbmi.mb_skip_coeff)
     {
@@ -101,64 +103,130 @@
         xd->mode_info_context->mbmi.mb_skip_coeff = (eobtotal==0);
     }
 
+    mode = xd->mode_info_context->mbmi.mode;
+
     if (xd->segmentation_enabled)
         mb_init_dequantizer(pbi, xd);
 
-    /* do prediction */
-    if (xd->mode_info_context->mbmi.ref_frame == INTRA_FRAME)
+
+#if CONFIG_ERROR_CONCEALMENT
+
+    if(pbi->ec_active)
     {
-        if (pbi->common.filter_level)
+        int throw_residual;
+        /* When we have independent partitions we can apply residual even
+         * though other partitions within the frame are corrupt.
+         */
+        throw_residual = (!pbi->independent_partitions &&
+                          pbi->frame_corrupt_residual);
+        throw_residual = (throw_residual || vp8dx_bool_error(xd->current_bc));
+
+        if ((mb_idx >= pbi->mvs_corrupt_from_mb || throw_residual))
         {
-            unsigned char *uabove_row;
-            unsigned char *vabove_row;
-            unsigned char * uleft_col;
-            unsigned char * vleft_col;
-            uabove_row = pbi->mt_uabove_row[mb_row] + mb_col*8 +16;
-            vabove_row = pbi->mt_vabove_row[mb_row] + mb_col*8 +16;
-            uleft_col = pbi->mt_uleft_col[mb_row];
-            vleft_col = pbi->mt_vleft_col[mb_row];
-            vp8_build_intra_predictors_mbuv_s(xd, uabove_row,
-                                            vabove_row,
-                                            uleft_col,
-                                            vleft_col,
-                                            1,
-                                            xd->dst.u_buffer, xd->dst.v_buffer);
+            /* MB with corrupt residuals or corrupt mode/motion vectors.
+             * Better to use the predictor as reconstruction.
+             */
+            pbi->frame_corrupt_residual = 1;
+            vpx_memset(xd->qcoeff, 0, sizeof(xd->qcoeff));
+            vp8_conceal_corrupt_mb(xd);
 
-            if (xd->mode_info_context->mbmi.mode != B_PRED)
-            {
-                unsigned char *yabove_row;
-                unsigned char *yleft_col;
 
-                yabove_row = pbi->mt_yabove_row[mb_row] + mb_col*16 +32;
-                yleft_col = pbi->mt_yleft_col[mb_row];
+            corruption_detected = 1;
 
-                vp8_build_intra_predictors_mby_s(xd,
-                                                     yabove_row,
-                                                     yleft_col,
-                                                     1,
-                                                     xd->dst.y_buffer);
-            }
+            /* force idct to be skipped for B_PRED and use the
+             * prediction only for reconstruction
+             * */
+            vpx_memset(xd->eobs, 0, 25);
         }
+    }
+#endif
+
+    /* do prediction */
+    if (xd->mode_info_context->mbmi.ref_frame == INTRA_FRAME)
+    {
+        vp8_build_intra_predictors_mbuv_s(xd,
+                                          xd->recon_above[1],
+                                          xd->recon_above[2],
+                                          xd->recon_left[1],
+                                          xd->recon_left[2],
+                                          xd->recon_left_stride[1],
+                                          xd->dst.u_buffer, xd->dst.v_buffer);
+
+        if (mode != B_PRED)
+        {
+            vp8_build_intra_predictors_mby_s(xd,
+                                                 xd->recon_above[0],
+                                                 xd->recon_left[0],
+                                                 xd->recon_left_stride[0],
+                                                 xd->dst.y_buffer);
+        }
         else
         {
-            vp8_build_intra_predictors_mbuv_s(xd,
-                                        xd->dst.u_buffer - xd->dst.uv_stride,
-                                        xd->dst.v_buffer - xd->dst.uv_stride,
-                                        xd->dst.u_buffer - 1,
-                                        xd->dst.v_buffer - 1,
-                                        xd->dst.uv_stride,
-                                        xd->dst.u_buffer, xd->dst.v_buffer);
+            short *DQC = xd->dequant_y1;
+            int dst_stride = xd->dst.y_stride;
+            unsigned char *base_dst = xd->dst.y_buffer;
 
-            if (xd->mode_info_context->mbmi.mode != B_PRED)
+            /* clear out residual eob info */
+            if(xd->mode_info_context->mbmi.mb_skip_coeff)
+                vpx_memset(xd->eobs, 0, 25);
+
+            intra_prediction_down_copy(xd, xd->recon_above[0] + 16);
+
+            for (i = 0; i < 16; i++)
             {
-                vp8_build_intra_predictors_mby_s(xd,
-                                                     xd->dst.y_buffer - xd->dst.y_stride,
-                                                     xd->dst.y_buffer - 1,
-                                                     xd->dst.y_stride,
-                                                     xd->dst.y_buffer);
+                BLOCKD *b = &xd->block[i];
+                int b_mode = xd->mode_info_context->bmi[i].as_mode;
+                unsigned char *yabove;
+                unsigned char *yleft;
+                int left_stride;
+                unsigned char top_left;
+
+                /*Caution: For some b_mode, it needs 8 pixels (4 above + 4 above-right).*/
+                if (i < 4 && pbi->common.filter_level)
+                    yabove = xd->recon_above[0] + b->offset; //i*4;
+                else
+                    yabove = (base_dst - dst_stride) + b->offset;
+
+                if (i%4==0 && pbi->common.filter_level)
+                {
+                    yleft = xd->recon_left[0] + i;
+                    left_stride = 1;
+                }
+                else
+                {
+                    yleft = (base_dst  - 1) + b->offset;
+                    left_stride = dst_stride;
+                }
+
+                if ((i==4 || i==8 || i==12) && pbi->common.filter_level)
+                    top_left = *(xd->recon_left[0] + i - 1);
+                else
+                    top_left = yabove[-1];
+
+                vp8_intra4x4_predict_d_c(yabove, yleft, left_stride,
+                                       b_mode,
+                                       base_dst + b->offset, dst_stride,
+                                       top_left);
+
+                if (xd->eobs[i] )
+                {
+                    if (xd->eobs[i] > 1)
+                    {
+                        vp8_dequant_idct_add
+                            (b->qcoeff, DQC,
+                            base_dst + b->offset, dst_stride);
+                    }
+                    else
+                    {
+                        vp8_dc_only_idct_add
+                            (b->qcoeff[0] * DQC[0],
+                            base_dst + b->offset, dst_stride,
+                            base_dst + b->offset, dst_stride);
+                        ((int *)b->qcoeff)[0] = 0;
+                    }
+                }
             }
         }
-
     }
     else
     {
@@ -165,144 +233,66 @@
         vp8_build_inter_predictors_mb(xd);
     }
 
-    /* When we have independent partitions we can apply residual even
-     * though other partitions within the frame are corrupt.
-     */
-    throw_residual = (!pbi->independent_partitions &&
-                      pbi->frame_corrupt_residual);
-    throw_residual = (throw_residual || vp8dx_bool_error(xd->current_bc));
 
 #if CONFIG_ERROR_CONCEALMENT
-    if (pbi->ec_active &&
-        (mb_row * pbi->common.mb_cols + mb_col >= pbi->mvs_corrupt_from_mb ||
-         throw_residual))
+    if (corruption_detected)
     {
-        /* MB with corrupt residuals or corrupt mode/motion vectors.
-         * Better to use the predictor as reconstruction.
-         */
-        pbi->frame_corrupt_residual = 1;
-        vpx_memset(xd->qcoeff, 0, sizeof(xd->qcoeff));
-        vp8_conceal_corrupt_mb(xd);
         return;
     }
 #endif
 
-    /* dequantization and idct */
-    if (xd->mode_info_context->mbmi.mode == B_PRED)
+    if(!xd->mode_info_context->mbmi.mb_skip_coeff)
     {
-        short *DQC = xd->dequant_y1;
-        int dst_stride = xd->dst.y_stride;
-        unsigned char *base_dst = xd->dst.y_buffer;
-        unsigned char *above_right_src;
-
-        if (pbi->common.filter_level)
-            above_right_src = pbi->mt_yabove_row[mb_row] + mb_col*16 + 32 +16;
-        else
-            above_right_src = xd->dst.y_buffer - dst_stride + 16;
-
-        intra_prediction_down_copy(xd, above_right_src);
-
-
-        for (i = 0; i < 16; i++)
+        /* dequantization and idct */
+        if (mode != B_PRED)
         {
-            BLOCKD *b = &xd->block[i];
-            int b_mode = xd->mode_info_context->bmi[i].as_mode;
-            unsigned char *yabove;
-            unsigned char *yleft;
-            int left_stride;
-            unsigned char top_left;
+            short *DQC = xd->dequant_y1;
 
-            /*Caution: For some b_mode, it needs 8 pixels (4 above + 4 above-right).*/
-            if (i < 4 && pbi->common.filter_level)
-                yabove = pbi->mt_yabove_row[mb_row] + mb_col*16 + i*4 + 32;
-            else
-                yabove = base_dst + b->offset - dst_stride;
-
-            if (i%4==0 && pbi->common.filter_level)
+            if (mode != SPLITMV)
             {
-                yleft = pbi->mt_yleft_col[mb_row] + i;
-                left_stride = 1;
-            }
-            else
-            {
-                yleft = base_dst + b->offset - 1;
-                left_stride = dst_stride;
-            }
+                BLOCKD *b = &xd->block[24];
 
-            if ((i==4 || i==8 || i==12) && pbi->common.filter_level)
-                top_left = pbi->mt_yleft_col[mb_row][i-1];
-            else
-                top_left = yabove[-1];
-
-            vp8_intra4x4_predict_d_c(yabove, yleft, left_stride,
-                                   b_mode,
-                                   base_dst + b->offset, dst_stride,
-                                   top_left);
-
-            if (xd->eobs[i] )
-            {
-                if (xd->eobs[i] > 1)
+                /* do 2nd order transform on the dc block */
+                if (xd->eobs[24] > 1)
                 {
-                    vp8_dequant_idct_add
-                        (b->qcoeff, DQC,
-                        base_dst + b->offset, dst_stride);
+                    vp8_dequantize_b(b, xd->dequant_y2);
+
+                    vp8_short_inv_walsh4x4(&b->dqcoeff[0],
+                        xd->qcoeff);
+                    ((int *)b->qcoeff)[0] = 0;
+                    ((int *)b->qcoeff)[1] = 0;
+                    ((int *)b->qcoeff)[2] = 0;
+                    ((int *)b->qcoeff)[3] = 0;
+                    ((int *)b->qcoeff)[4] = 0;
+                    ((int *)b->qcoeff)[5] = 0;
+                    ((int *)b->qcoeff)[6] = 0;
+                    ((int *)b->qcoeff)[7] = 0;
                 }
                 else
                 {
-                    vp8_dc_only_idct_add
-                        (b->qcoeff[0] * DQC[0],
-                        base_dst + b->offset, dst_stride,
-                        base_dst + b->offset, dst_stride);
+                    b->dqcoeff[0] = b->qcoeff[0] * xd->dequant_y2[0];
+                    vp8_short_inv_walsh4x4_1(&b->dqcoeff[0],
+                        xd->qcoeff);
                     ((int *)b->qcoeff)[0] = 0;
                 }
-            }
-        }
-    }
-    else
-    {
-        short *DQC = xd->dequant_y1;
 
-        if (xd->mode_info_context->mbmi.mode != SPLITMV)
-        {
-            BLOCKD *b = &xd->block[24];
-
-            /* do 2nd order transform on the dc block */
-            if (xd->eobs[24] > 1)
-            {
-                vp8_dequantize_b(b, xd->dequant_y2);
-
-                vp8_short_inv_walsh4x4(&b->dqcoeff[0],
-                    xd->qcoeff);
-                ((int *)b->qcoeff)[0] = 0;
-                ((int *)b->qcoeff)[1] = 0;
-                ((int *)b->qcoeff)[2] = 0;
-                ((int *)b->qcoeff)[3] = 0;
-                ((int *)b->qcoeff)[4] = 0;
-                ((int *)b->qcoeff)[5] = 0;
-                ((int *)b->qcoeff)[6] = 0;
-                ((int *)b->qcoeff)[7] = 0;
+                /* override the dc dequant constant in order to preserve the
+                 * dc components
+                 */
+                DQC = xd->dequant_y1_dc;
             }
-            else
-            {
-                b->dqcoeff[0] = b->qcoeff[0] * xd->dequant_y2[0];
-                vp8_short_inv_walsh4x4_1(&b->dqcoeff[0], xd->qcoeff);
-                ((int *)b->qcoeff)[0] = 0;
-            }
 
-            /* override the dc dequant constant */
-            DQC = xd->dequant_y1_dc;
+            vp8_dequant_idct_add_y_block
+                            (xd->qcoeff, DQC,
+                             xd->dst.y_buffer,
+                             xd->dst.y_stride, xd->eobs);
         }
 
-        vp8_dequant_idct_add_y_block
-                        (xd->qcoeff, DQC,
-                         xd->dst.y_buffer,
-                         xd->dst.y_stride, xd->eobs);
+        vp8_dequant_idct_add_uv_block
+                        (xd->qcoeff+16*16, xd->dequant_uv,
+                         xd->dst.u_buffer, xd->dst.v_buffer,
+                         xd->dst.uv_stride, xd->eobs+16);
     }
-
-    vp8_dequant_idct_add_uv_block
-                    (xd->qcoeff+16*16, xd->dequant_uv,
-                     xd->dst.u_buffer, xd->dst.v_buffer,
-                     xd->dst.uv_stride, xd->eobs+16);
 }
 
 typedef void (*init_current_bc_fn_t)(VP8D_COMP *pbi, MACROBLOCKD *xd,
@@ -392,6 +382,39 @@
        xd->mb_to_top_edge = -((mb_row * 16)) << 3;
        xd->mb_to_bottom_edge = ((pc->mb_rows - 1 - mb_row) * 16) << 3;
 
+       if (pbi->common.filter_level)
+       {
+          xd->recon_above[0] = pbi->mt_yabove_row[mb_row] + 0*16 +32;
+          xd->recon_above[1] = pbi->mt_uabove_row[mb_row] + 0*8 +16;
+          xd->recon_above[2] = pbi->mt_vabove_row[mb_row] + 0*8 +16;
+
+          xd->recon_left[0] = pbi->mt_yleft_col[mb_row];
+          xd->recon_left[1] = pbi->mt_uleft_col[mb_row];
+          xd->recon_left[2] = pbi->mt_vleft_col[mb_row];
+
+          //TODO: move to outside row loop
+          xd->recon_left_stride[0] = 1;
+          xd->recon_left_stride[1] = 1;
+       }
+       else
+       {
+          xd->recon_above[0] = dst_buffer[0] + recon_yoffset;
+          xd->recon_above[1] = dst_buffer[1] + recon_uvoffset;
+          xd->recon_above[2] = dst_buffer[2] + recon_uvoffset;
+
+          xd->recon_left[0] = xd->recon_above[0] - 1;
+          xd->recon_left[1] = xd->recon_above[1] - 1;
+          xd->recon_left[2] = xd->recon_above[2] - 1;
+
+          xd->recon_above[0] -= xd->dst.y_stride;
+          xd->recon_above[1] -= xd->dst.uv_stride;
+          xd->recon_above[2] -= xd->dst.uv_stride;
+
+          //TODO: move to outside row loop
+          xd->recon_left_stride[0] = xd->dst.y_stride;
+          xd->recon_left_stride[1] = xd->dst.uv_stride;
+       }
+
        for (mb_col = 0; mb_col < pc->mb_cols; mb_col++)
        {
            if ( mb_row > 0 && (mb_col & (nsync-1)) == 0)
@@ -452,12 +475,23 @@
            /* propagate errors from reference frames */
            xd->corrupted |= ref_fb_corrupted[xd->mode_info_context->mbmi.ref_frame];
 
-           decode_macroblock(pbi, xd, mb_row, mb_col);
+           decode_macroblock(pbi, xd, 0);
 
            xd->left_available = 1;
 
            /* check if the boolean decoder has suffered an error */
            xd->corrupted |= vp8dx_bool_error(xd->current_bc);
+
+           xd->recon_above[0] += 16;
+           xd->recon_above[1] += 8;
+           xd->recon_above[2] += 8;
+
+           if (!pbi->common.filter_level)
+           {
+              xd->recon_left[0] += 16;
+              xd->recon_left[1] += 8;
+              xd->recon_left[2] += 8;
+           }
 
            if (pbi->common.filter_level)
            {