Bug 1332664 - Cherry-pick upstream webrtc.org patch to not depend on 'experimental' includes for libvpx. r=rillian, a=ritu
author Randell Jesup <rjesup@jesup.org>
Fri, 27 Jan 2017 21:10:58 -0500
changeset 378116 900c746565fd10fb9feb7e28b6582030e77fee1b
parent 378115 721ea96b25ca00a6c61683ded3d1779da5d46487
child 378117 62764baa6ef6dec70b1f9248c6386eecc002ba7b
push id 1419
push user jlund@mozilla.com
push date Mon, 10 Apr 2017 20:44:07 +0000
treeherder mozilla-release@5e6801b73ef6
reviewers rillian, ritu
bugs 1332664
milestone 53.0a2
media/libvpx/libvpx/tools/all_builds.py
media/libvpx/update.py
media/libvpx/vp9_svc.patch
media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h
--- a/media/libvpx/libvpx/tools/all_builds.py
+++ b/media/libvpx/libvpx/tools/all_builds.py
@@ -1,16 +1,16 @@
 #!/usr/bin/python
 
 import getopt
 import subprocess
 import sys
 
 LONG_OPTIONS = ["shard=", "shards="]
-BASE_COMMAND = "./configure --enable-internal-stats --enable-experimental"
+BASE_COMMAND = "./configure --enable-internal-stats"
 
 def RunCommand(command):
   run = subprocess.Popen(command, shell=True)
   output = run.communicate()
   if run.returncode:
     print "Non-zero return code: " + str(run.returncode) + " => exiting!"
     sys.exit(1)
 
--- a/media/libvpx/update.py
+++ b/media/libvpx/update.py
@@ -36,16 +36,19 @@ def apply_patches():
     os.system("patch -p3 < bug1137614.patch")
     # Bug 1263384 - Check input frame resolution
     os.system("patch -p3 < input_frame_validation.patch")
     # Bug 1315288 - Check input frame resolution for vp9
     os.system("patch -p3 < input_frame_validation_vp9.patch")
     # Avoid c/asm name collision for loopfilter_sse2
     os.system("patch -p1 < rename_duplicate_files.patch")
     os.system("mv libvpx/vpx_dsp/x86/loopfilter_sse2.c libvpx/vpx_dsp/x86/loopfilter_intrin_sse2.c")
+    # Cherry-pick upstream patch for avoiding --enable-experimental
+    # from https://codereview.webrtc.org/2654633002
+    os.system("patch -p1 < vp9_svc.patch")
 
 
 def update_readme(commit):
     with open('README_MOZILLA') as f:
         readme = f.read()
 
     if 'The git commit ID used was' in readme:
         new_readme = re.sub('The git commit ID used was [v\.a-f0-9]+',
new file mode 100644
--- /dev/null
+++ b/media/libvpx/vp9_svc.patch
@@ -0,0 +1,179 @@
+# HG changeset patch
+# Parent  30581ce4c9566cf4fb4ff2798bddff5ce21741f5
+Bug 1332664: Cherry-pick upstream webrtc.org patch to not depend on 'experimental' includes for libvpx r=rillian
+
+diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
+--- a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
++++ b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
+@@ -74,16 +74,17 @@ VP9EncoderImpl::VP9EncoderImpl()
+       frames_since_kf_(0),
+       num_temporal_layers_(0),
+       num_spatial_layers_(0),
+       num_cores_(0),
+       frames_encoded_(0),
+       // Use two spatial when screensharing with flexible mode.
+       spatial_layer_(new ScreenshareLayersVP9(2)) {
+   memset(&codec_, 0, sizeof(codec_));
++  memset(&svc_params_, 0, sizeof(vpx_svc_extra_cfg_t));
+   uint32_t seed = static_cast<uint32_t>(TickTime::MillisecondTimestamp());
+   srand(seed);
+ }
+ 
+ VP9EncoderImpl::~VP9EncoderImpl() {
+   Release();
+ }
+ 
+@@ -138,24 +139,24 @@ bool VP9EncoderImpl::SetSvcRates() {
+                            codec_.spatialLayers[i].target_bitrate_bps /
+                            total_bitrate_bps);
+     }
+   } else {
+     float rate_ratio[VPX_MAX_LAYERS] = {0};
+     float total = 0;
+ 
+     for (i = 0; i < num_spatial_layers_; ++i) {
+-      if (svc_internal_.svc_params.scaling_factor_num[i] <= 0 ||
+-          svc_internal_.svc_params.scaling_factor_den[i] <= 0) {
++      if (svc_params_.scaling_factor_num[i] <= 0 ||
++          svc_params_.scaling_factor_den[i] <= 0) {
+         LOG(LS_ERROR) << "Scaling factors not specified!";
+         return false;
+       }
+       rate_ratio[i] =
+-          static_cast<float>(svc_internal_.svc_params.scaling_factor_num[i]) /
+-          svc_internal_.svc_params.scaling_factor_den[i];
++          static_cast<float>(svc_params_.scaling_factor_num[i]) /
++          svc_params_.scaling_factor_den[i];
+       total += rate_ratio[i];
+     }
+ 
+     for (i = 0; i < num_spatial_layers_; ++i) {
+       config_->ss_target_bitrate[i] = static_cast<unsigned int>(
+           config_->rc_target_bitrate * rate_ratio[i] / total);
+       if (num_temporal_layers_ == 1) {
+         config_->layer_target_bitrate[i] = config_->ss_target_bitrate[i];
+@@ -385,32 +386,32 @@ int VP9EncoderImpl::NumberOfThreads(int 
+     return 1;
+   }
+ }
+ 
+ int VP9EncoderImpl::InitAndSetControlSettings(const VideoCodec* inst) {
+   // Set QP-min/max per spatial and temporal layer.
+   int tot_num_layers = num_spatial_layers_ * num_temporal_layers_;
+   for (int i = 0; i < tot_num_layers; ++i) {
+-    svc_internal_.svc_params.max_quantizers[i] = config_->rc_max_quantizer;
+-    svc_internal_.svc_params.min_quantizers[i] = config_->rc_min_quantizer;
++    svc_params_.max_quantizers[i] = config_->rc_max_quantizer;
++    svc_params_.min_quantizers[i] = config_->rc_min_quantizer;
+   }
+   config_->ss_number_layers = num_spatial_layers_;
+   if (ExplicitlyConfiguredSpatialLayers()) {
+     for (int i = 0; i < num_spatial_layers_; ++i) {
+       const auto& layer = codec_.spatialLayers[i];
+-      svc_internal_.svc_params.scaling_factor_num[i] = layer.scaling_factor_num;
+-      svc_internal_.svc_params.scaling_factor_den[i] = layer.scaling_factor_den;
++      svc_params_.scaling_factor_num[i] = layer.scaling_factor_num;
++      svc_params_.scaling_factor_den[i] = layer.scaling_factor_den;
+     }
+   } else {
+     int scaling_factor_num = 256;
+     for (int i = num_spatial_layers_ - 1; i >= 0; --i) {
+       // 1:2 scaling in each dimension.
+-      svc_internal_.svc_params.scaling_factor_num[i] = scaling_factor_num;
+-      svc_internal_.svc_params.scaling_factor_den[i] = 256;
++      svc_params_.scaling_factor_num[i] = scaling_factor_num;
++      svc_params_.scaling_factor_den[i] = 256;
+       if (codec_.mode != kScreensharing)
+         scaling_factor_num /= 2;
+     }
+   }
+ 
+   if (!SetSvcRates()) {
+     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+   }
+@@ -424,17 +425,17 @@ int VP9EncoderImpl::InitAndSetControlSet
+   vpx_codec_control(encoder_, VP9E_SET_AQ_MODE,
+                     inst->codecSpecific.VP9.adaptiveQpMode ? 3 : 0);
+ 
+   vpx_codec_control(
+       encoder_, VP9E_SET_SVC,
+       (num_temporal_layers_ > 1 || num_spatial_layers_ > 1) ? 1 : 0);
+   if (num_temporal_layers_ > 1 || num_spatial_layers_ > 1) {
+     vpx_codec_control(encoder_, VP9E_SET_SVC_PARAMETERS,
+-                      &svc_internal_.svc_params);
++                      &svc_params_);
+   }
+   // Register callback for getting each spatial layer.
+   vpx_codec_priv_output_cx_pkt_cb_pair_t cbp = {
+       VP9EncoderImpl::EncoderOutputCodedPacketCallback,
+       reinterpret_cast<void*>(this)};
+   vpx_codec_control(encoder_, VP9E_REGISTER_CX_CALLBACK,
+                     reinterpret_cast<void*>(&cbp));
+ 
+@@ -680,21 +681,21 @@ void VP9EncoderImpl::PopulateCodecSpecif
+         static_cast<uint8_t>(frames_since_kf_ % gof_.num_frames_in_gof);
+     vp9_info->temporal_up_switch = gof_.temporal_up_switch[vp9_info->gof_idx];
+   }
+ 
+   if (vp9_info->ss_data_available) {
+     vp9_info->spatial_layer_resolution_present = true;
+     for (size_t i = 0; i < vp9_info->num_spatial_layers; ++i) {
+       vp9_info->width[i] = codec_.width *
+-                           svc_internal_.svc_params.scaling_factor_num[i] /
+-                           svc_internal_.svc_params.scaling_factor_den[i];
++                           svc_params_.scaling_factor_num[i] /
++                           svc_params_.scaling_factor_den[i];
+       vp9_info->height[i] = codec_.height *
+-                            svc_internal_.svc_params.scaling_factor_num[i] /
+-                            svc_internal_.svc_params.scaling_factor_den[i];
++                            svc_params_.scaling_factor_num[i] /
++                            svc_params_.scaling_factor_den[i];
+     }
+     if (!vp9_info->flexible_mode) {
+       vp9_info->gof.CopyGofInfoVP9(gof_);
+     }
+   }
+ }
+ 
+ int VP9EncoderImpl::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) {
+diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h
+--- a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h
++++ b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h
+@@ -12,17 +12,17 @@
+ #ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_VP9_IMPL_H_
+ #define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_VP9_IMPL_H_
+ 
+ #include <vector>
+ 
+ #include "webrtc/modules/video_coding/codecs/vp9/include/vp9.h"
+ #include "webrtc/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h"
+ 
+-#include "vpx/svc_context.h"
++#include "vpx/vp8cx.h"
+ #include "vpx/vpx_decoder.h"
+ #include "vpx/vpx_encoder.h"
+ 
+ namespace webrtc {
+ 
+ class ScreenshareLayersVP9;
+ 
+ class VP9EncoderImpl : public VP9Encoder {
+@@ -111,17 +111,17 @@ class VP9EncoderImpl : public VP9Encoder
+   bool inited_;
+   int64_t timestamp_;
+   uint16_t picture_id_;
+   int cpu_speed_;
+   uint32_t rc_max_intra_target_;
+   vpx_codec_ctx_t* encoder_;
+   vpx_codec_enc_cfg_t* config_;
+   vpx_image_t* raw_;
+-  SvcInternal_t svc_internal_;
++  vpx_svc_extra_cfg_t svc_params_;
+   const VideoFrame* input_image_;
+   GofInfoVP9 gof_;       // Contains each frame's temporal information for
+                          // non-flexible mode.
+   uint8_t tl0_pic_idx_;  // Only used in non-flexible mode.
+   size_t frames_since_kf_;
+   uint8_t num_temporal_layers_;
+   uint8_t num_spatial_layers_;
+   uint8_t num_cores_;
--- a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
+++ b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
@@ -74,16 +74,17 @@ VP9EncoderImpl::VP9EncoderImpl()
       frames_since_kf_(0),
       num_temporal_layers_(0),
       num_spatial_layers_(0),
       num_cores_(0),
       frames_encoded_(0),
       // Use two spatial when screensharing with flexible mode.
       spatial_layer_(new ScreenshareLayersVP9(2)) {
   memset(&codec_, 0, sizeof(codec_));
+  memset(&svc_params_, 0, sizeof(vpx_svc_extra_cfg_t));
   uint32_t seed = static_cast<uint32_t>(TickTime::MillisecondTimestamp());
   srand(seed);
 }
 
 VP9EncoderImpl::~VP9EncoderImpl() {
   Release();
 }
 
@@ -138,24 +139,24 @@ bool VP9EncoderImpl::SetSvcRates() {
                            codec_.spatialLayers[i].target_bitrate_bps /
                            total_bitrate_bps);
     }
   } else {
     float rate_ratio[VPX_MAX_LAYERS] = {0};
     float total = 0;
 
     for (i = 0; i < num_spatial_layers_; ++i) {
-      if (svc_internal_.svc_params.scaling_factor_num[i] <= 0 ||
-          svc_internal_.svc_params.scaling_factor_den[i] <= 0) {
+      if (svc_params_.scaling_factor_num[i] <= 0 ||
+          svc_params_.scaling_factor_den[i] <= 0) {
         LOG(LS_ERROR) << "Scaling factors not specified!";
         return false;
       }
       rate_ratio[i] =
-          static_cast<float>(svc_internal_.svc_params.scaling_factor_num[i]) /
-          svc_internal_.svc_params.scaling_factor_den[i];
+          static_cast<float>(svc_params_.scaling_factor_num[i]) /
+          svc_params_.scaling_factor_den[i];
       total += rate_ratio[i];
     }
 
     for (i = 0; i < num_spatial_layers_; ++i) {
       config_->ss_target_bitrate[i] = static_cast<unsigned int>(
           config_->rc_target_bitrate * rate_ratio[i] / total);
       if (num_temporal_layers_ == 1) {
         config_->layer_target_bitrate[i] = config_->ss_target_bitrate[i];
@@ -385,32 +386,32 @@ int VP9EncoderImpl::NumberOfThreads(int 
     return 1;
   }
 }
 
 int VP9EncoderImpl::InitAndSetControlSettings(const VideoCodec* inst) {
   // Set QP-min/max per spatial and temporal layer.
   int tot_num_layers = num_spatial_layers_ * num_temporal_layers_;
   for (int i = 0; i < tot_num_layers; ++i) {
-    svc_internal_.svc_params.max_quantizers[i] = config_->rc_max_quantizer;
-    svc_internal_.svc_params.min_quantizers[i] = config_->rc_min_quantizer;
+    svc_params_.max_quantizers[i] = config_->rc_max_quantizer;
+    svc_params_.min_quantizers[i] = config_->rc_min_quantizer;
   }
   config_->ss_number_layers = num_spatial_layers_;
   if (ExplicitlyConfiguredSpatialLayers()) {
     for (int i = 0; i < num_spatial_layers_; ++i) {
       const auto& layer = codec_.spatialLayers[i];
-      svc_internal_.svc_params.scaling_factor_num[i] = layer.scaling_factor_num;
-      svc_internal_.svc_params.scaling_factor_den[i] = layer.scaling_factor_den;
+      svc_params_.scaling_factor_num[i] = layer.scaling_factor_num;
+      svc_params_.scaling_factor_den[i] = layer.scaling_factor_den;
     }
   } else {
     int scaling_factor_num = 256;
     for (int i = num_spatial_layers_ - 1; i >= 0; --i) {
       // 1:2 scaling in each dimension.
-      svc_internal_.svc_params.scaling_factor_num[i] = scaling_factor_num;
-      svc_internal_.svc_params.scaling_factor_den[i] = 256;
+      svc_params_.scaling_factor_num[i] = scaling_factor_num;
+      svc_params_.scaling_factor_den[i] = 256;
       if (codec_.mode != kScreensharing)
         scaling_factor_num /= 2;
     }
   }
 
   if (!SetSvcRates()) {
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   }
@@ -424,17 +425,17 @@ int VP9EncoderImpl::InitAndSetControlSet
   vpx_codec_control(encoder_, VP9E_SET_AQ_MODE,
                     inst->codecSpecific.VP9.adaptiveQpMode ? 3 : 0);
 
   vpx_codec_control(
       encoder_, VP9E_SET_SVC,
       (num_temporal_layers_ > 1 || num_spatial_layers_ > 1) ? 1 : 0);
   if (num_temporal_layers_ > 1 || num_spatial_layers_ > 1) {
     vpx_codec_control(encoder_, VP9E_SET_SVC_PARAMETERS,
-                      &svc_internal_.svc_params);
+                      &svc_params_);
   }
   // Register callback for getting each spatial layer.
   vpx_codec_priv_output_cx_pkt_cb_pair_t cbp = {
       VP9EncoderImpl::EncoderOutputCodedPacketCallback,
       reinterpret_cast<void*>(this)};
   vpx_codec_control(encoder_, VP9E_REGISTER_CX_CALLBACK,
                     reinterpret_cast<void*>(&cbp));
 
@@ -680,21 +681,21 @@ void VP9EncoderImpl::PopulateCodecSpecif
         static_cast<uint8_t>(frames_since_kf_ % gof_.num_frames_in_gof);
     vp9_info->temporal_up_switch = gof_.temporal_up_switch[vp9_info->gof_idx];
   }
 
   if (vp9_info->ss_data_available) {
     vp9_info->spatial_layer_resolution_present = true;
     for (size_t i = 0; i < vp9_info->num_spatial_layers; ++i) {
       vp9_info->width[i] = codec_.width *
-                           svc_internal_.svc_params.scaling_factor_num[i] /
-                           svc_internal_.svc_params.scaling_factor_den[i];
+                           svc_params_.scaling_factor_num[i] /
+                           svc_params_.scaling_factor_den[i];
       vp9_info->height[i] = codec_.height *
-                            svc_internal_.svc_params.scaling_factor_num[i] /
-                            svc_internal_.svc_params.scaling_factor_den[i];
+                            svc_params_.scaling_factor_num[i] /
+                            svc_params_.scaling_factor_den[i];
     }
     if (!vp9_info->flexible_mode) {
       vp9_info->gof.CopyGofInfoVP9(gof_);
     }
   }
 }
 
 int VP9EncoderImpl::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) {
--- a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h
+++ b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h
@@ -12,17 +12,17 @@
 #ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_VP9_IMPL_H_
 #define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_VP9_IMPL_H_
 
 #include <vector>
 
 #include "webrtc/modules/video_coding/codecs/vp9/include/vp9.h"
 #include "webrtc/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h"
 
-#include "vpx/svc_context.h"
+#include "vpx/vp8cx.h"
 #include "vpx/vpx_decoder.h"
 #include "vpx/vpx_encoder.h"
 
 namespace webrtc {
 
 class ScreenshareLayersVP9;
 
 class VP9EncoderImpl : public VP9Encoder {
@@ -111,17 +111,17 @@ class VP9EncoderImpl : public VP9Encoder
   bool inited_;
   int64_t timestamp_;
   uint16_t picture_id_;
   int cpu_speed_;
   uint32_t rc_max_intra_target_;
   vpx_codec_ctx_t* encoder_;
   vpx_codec_enc_cfg_t* config_;
   vpx_image_t* raw_;
-  SvcInternal_t svc_internal_;
+  vpx_svc_extra_cfg_t svc_params_;
   const VideoFrame* input_image_;
   GofInfoVP9 gof_;       // Contains each frame's temporal information for
                          // non-flexible mode.
   uint8_t tl0_pic_idx_;  // Only used in non-flexible mode.
   size_t frames_since_kf_;
   uint8_t num_temporal_layers_;
   uint8_t num_spatial_layers_;
   uint8_t num_cores_;
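
For context: the substance of the cherry-pick is the switch from the experimental SvcInternal_t helper (vpx/svc_context.h) to the stable vpx_svc_extra_cfg_t struct declared in vpx/vp8cx.h, passed directly to VP9E_SET_SVC_PARAMETERS. Below is a minimal standalone sketch of that configuration path. It is not part of this changeset; the resolution, bitrate, and layer counts are illustrative assumptions, and error handling is reduced to early returns.

// Minimal sketch (not part of this changeset): configuring VP9 SVC through the
// non-experimental libvpx API that the patch switches to. Resolution, bitrate,
// and layer counts below are assumed values for illustration only.
#include <cstring>

#include "vpx/vp8cx.h"        // vpx_svc_extra_cfg_t, VP9E_SET_SVC, VP9E_SET_SVC_PARAMETERS
#include "vpx/vpx_encoder.h"

int main() {
  vpx_codec_enc_cfg_t cfg;
  if (vpx_codec_enc_config_default(vpx_codec_vp9_cx(), &cfg, 0) != VPX_CODEC_OK)
    return 1;

  const int num_spatial_layers = 2;   // assumed layer counts
  const int num_temporal_layers = 1;
  cfg.g_w = 1280;
  cfg.g_h = 720;
  cfg.rc_target_bitrate = 1200;       // kbit/s
  cfg.ss_number_layers = num_spatial_layers;
  cfg.ts_number_layers = num_temporal_layers;

  vpx_codec_ctx_t encoder;
  if (vpx_codec_enc_init(&encoder, vpx_codec_vp9_cx(), &cfg, 0) != VPX_CODEC_OK)
    return 1;

  // Zero-initialization mirrors the memset the patch adds to the
  // VP9EncoderImpl constructor.
  vpx_svc_extra_cfg_t svc_params;
  std::memset(&svc_params, 0, sizeof(vpx_svc_extra_cfg_t));

  // 1:2 downscaling per spatial layer, as in InitAndSetControlSettings().
  int scaling_factor_num = 256;
  for (int i = num_spatial_layers - 1; i >= 0; --i) {
    svc_params.scaling_factor_num[i] = scaling_factor_num;
    svc_params.scaling_factor_den[i] = 256;
    scaling_factor_num /= 2;
  }
  // Per-layer QP bounds, as in the patched loop over all layers.
  for (int i = 0; i < num_spatial_layers * num_temporal_layers; ++i) {
    svc_params.max_quantizers[i] = cfg.rc_max_quantizer;
    svc_params.min_quantizers[i] = cfg.rc_min_quantizer;
  }

  // Same controls the patched vp9_impl.cc issues; the parameter struct now
  // comes from vpx/vp8cx.h rather than the experimental vpx/svc_context.h.
  vpx_codec_control(&encoder, VP9E_SET_SVC, 1);
  vpx_codec_control(&encoder, VP9E_SET_SVC_PARAMETERS, &svc_params);

  vpx_codec_destroy(&encoder);
  return 0;
}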