Bug 1341285: rollup of changes for webrtc after applying webrtc.org v57 update r=ng,jesup,pehrsons,drno,dminor,cpearce,jya,glandium,dmajor
authorRandell Jesup <rjesup@jesup.org>
Tue, 13 Jun 2017 01:54:13 -0400
changeset 412410 cbb06ea384e95b8e33886c0825bee14867a8851b
parent 412409 fda2b2655b267852bfd29fe60a3ca0664c8de18b
child 412411 5bbdb7d36ee3c136a0ed03be9d5b012d05dfd08e
push id7566
push usermtabara@mozilla.com
push dateWed, 02 Aug 2017 08:25:16 +0000
treeherdermozilla-beta@86913f512c3c [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewersng, jesup, pehrsons, drno, dminor, cpearce, jya, glandium, dmajor
bugs1341285
milestone56.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1341285: rollup of changes for webrtc after applying webrtc.org v57 update r=ng,jesup,pehrsons,drno,dminor,cpearce,jya,glandium,dmajor Includes re-importing gyp files removed from upstream in v56, and then updating them to match the BUILD.gn file changes.
CLOBBER
build/gyp.mozbuild
dom/media/gmp-plugin-openh264/gmp-fake-openh264.cpp
dom/media/gmp/GMPVideoEncoderParent.cpp
dom/media/gmp/GMPVideoEncoderParent.h
dom/media/systemservices/CamerasChild.h
dom/media/systemservices/CamerasParent.cpp
dom/media/systemservices/CamerasParent.h
dom/media/systemservices/LoadManager.cpp
dom/media/systemservices/LoadManager.h
dom/media/systemservices/LoadManagerFactory.cpp
dom/media/systemservices/LoadManagerFactory.h
dom/media/systemservices/LoadMonitor.cpp
dom/media/systemservices/LoadMonitor.h
dom/media/systemservices/VideoEngine.cpp
dom/media/systemservices/VideoEngine.h
dom/media/systemservices/VideoFrameUtils.cpp
dom/media/systemservices/moz.build
dom/media/tests/mochitest/head.js
dom/media/tests/mochitest/pc.js
dom/media/tests/mochitest/test_peerConnection_captureStream_canvas_2d.html
dom/media/tests/mochitest/test_peerConnection_captureStream_canvas_2d_noSSRC.html
dom/media/tests/mochitest/test_peerConnection_captureStream_canvas_webgl.html
dom/media/tests/mochitest/test_peerConnection_multiple_captureStream_canvas_2d.html
dom/media/tests/mochitest/test_peerConnection_renderAfterRenegotiation.html
dom/media/tests/mochitest/test_peerConnection_replaceVideoThenRenegotiate.html
dom/media/tests/mochitest/test_peerConnection_scaleResolution.html
dom/media/tests/mochitest/test_peerConnection_simulcastAnswer.html
dom/media/tests/mochitest/test_peerConnection_simulcastOffer.html
dom/media/tests/mochitest/test_peerConnection_verifyVideoAfterRenegotiation.html
dom/media/tests/mochitest/test_peerConnection_videoRenegotiationInactiveAnswer.html
dom/media/webrtc/MediaEngineRemoteVideoSource.h
dom/media/webrtc/MediaEngineWebRTC.cpp
dom/media/webrtc/MediaEngineWebRTC.h
ipc/chromium/src/third_party/moz.build
media/libvpx/moz.build
media/webrtc/moz.build
media/webrtc/signaling/signaling.gyp
media/webrtc/signaling/src/common/NullTransport.h
media/webrtc/signaling/src/common/YuvStamper.cpp
media/webrtc/signaling/src/common/browser_logging/WebRtcLog.cpp
media/webrtc/signaling/src/jsep/JsepSessionImpl.cpp
media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
media/webrtc/signaling/src/media-conduit/AudioConduit.h
media/webrtc/signaling/src/media-conduit/GmpVideoCodec.cpp
media/webrtc/signaling/src/media-conduit/MediaConduitInterface.h
media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
media/webrtc/signaling/src/media-conduit/VideoConduit.h
media/webrtc/signaling/src/media-conduit/WebrtcGmpVideoCodec.cpp
media/webrtc/signaling/src/media-conduit/WebrtcGmpVideoCodec.h
media/webrtc/signaling/src/media-conduit/WebrtcMediaCodecVP8VideoCodec.cpp
media/webrtc/signaling/src/media-conduit/WebrtcMediaCodecVP8VideoCodec.h
media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
media/webrtc/signaling/src/mediapipeline/MediaPipelineFilter.cpp
media/webrtc/signaling/src/peerconnection/MediaPipelineFactory.cpp
media/webrtc/signaling/src/peerconnection/PeerConnectionMedia.cpp
media/webrtc/trunk/gtest/moz.build
media/webrtc/trunk/peerconnection.gyp
media/webrtc/trunk/webrtc/api/api.gyp
media/webrtc/trunk/webrtc/api/api_java.gyp
media/webrtc/trunk/webrtc/api/video/video_frame.h
media/webrtc/trunk/webrtc/audio/webrtc_audio.gypi
media/webrtc/trunk/webrtc/base/base.gyp
media/webrtc/trunk/webrtc/base/base_tests.gyp
media/webrtc/trunk/webrtc/base/basictypes.h
media/webrtc/trunk/webrtc/base/checks.cc
media/webrtc/trunk/webrtc/base/macutils.cc
media/webrtc/trunk/webrtc/base/platform_thread.cc
media/webrtc/trunk/webrtc/base/platform_thread.h
media/webrtc/trunk/webrtc/base/sanitizer.h
media/webrtc/trunk/webrtc/base/sequenced_task_checker_impl.cc
media/webrtc/trunk/webrtc/base/sigslot.h
media/webrtc/trunk/webrtc/base/stringutils.h
media/webrtc/trunk/webrtc/base/task_queue_libevent.cc
media/webrtc/trunk/webrtc/build/android/AndroidManifest.xml
media/webrtc/trunk/webrtc/build/arm_neon.gypi
media/webrtc/trunk/webrtc/build/chromium_common.gypi
media/webrtc/trunk/webrtc/build/common.gypi
media/webrtc/trunk/webrtc/build/filename_rules.gypi
media/webrtc/trunk/webrtc/build/ios/merge_ios_libs.gyp
media/webrtc/trunk/webrtc/build/ios/objc_app.gypi
media/webrtc/trunk/webrtc/build/isolate.gypi
media/webrtc/trunk/webrtc/build/merge_libs.gyp
media/webrtc/trunk/webrtc/build/merge_libs_voice.gyp
media/webrtc/trunk/webrtc/build/merge_voice_libs.gyp
media/webrtc/trunk/webrtc/build/objc_common.gypi
media/webrtc/trunk/webrtc/build/protoc.gypi
media/webrtc/trunk/webrtc/call/audio_send_stream.cc
media/webrtc/trunk/webrtc/call/audio_send_stream_call.cc
media/webrtc/trunk/webrtc/call/call.cc
media/webrtc/trunk/webrtc/call/call.h
media/webrtc/trunk/webrtc/call/webrtc_call.gypi
media/webrtc/trunk/webrtc/common.gyp
media/webrtc/trunk/webrtc/common_audio/common_audio.gyp
media/webrtc/trunk/webrtc/common_audio/resampler/include/resampler.h
media/webrtc/trunk/webrtc/common_audio/resampler/push_resampler.cc
media/webrtc/trunk/webrtc/common_audio/resampler/resampler.cc
media/webrtc/trunk/webrtc/common_audio/resampler/resampler_unittest.cc
media/webrtc/trunk/webrtc/common_audio/resampler/sinc_resampler.cc
media/webrtc/trunk/webrtc/common_audio/resampler/sinc_resampler_neon.cc
media/webrtc/trunk/webrtc/common_audio/resampler/sinusoidal_linear_chirp_source.cc
media/webrtc/trunk/webrtc/common_audio/wav_file.cc
media/webrtc/trunk/webrtc/common_audio/wav_header.cc
media/webrtc/trunk/webrtc/common_audio/window_generator.cc
media/webrtc/trunk/webrtc/common_types.cc
media/webrtc/trunk/webrtc/common_types.h
media/webrtc/trunk/webrtc/common_video/common_video.gyp
media/webrtc/trunk/webrtc/common_video/h264/h264_common.h
media/webrtc/trunk/webrtc/common_video/libyuv/webrtc_libyuv.cc
media/webrtc/trunk/webrtc/config.cc
media/webrtc/trunk/webrtc/config.h
media/webrtc/trunk/webrtc/media/base/videoadapter.cc
media/webrtc/trunk/webrtc/media/base/videoadapter.h
media/webrtc/trunk/webrtc/media/media.gyp
media/webrtc/trunk/webrtc/modules/audio_coding/acm2/rent_a_codec.h
media/webrtc/trunk/webrtc/modules/audio_coding/audio_coding.gypi
media/webrtc/trunk/webrtc/modules/audio_coding/audio_coding_tests.gypi
media/webrtc/trunk/webrtc/modules/audio_coding/audio_network_adaptor/audio_network_adaptor.gypi
media/webrtc/trunk/webrtc/modules/audio_coding/codecs/audio_decoder.h
media/webrtc/trunk/webrtc/modules/audio_coding/codecs/cng/cng.gypi
media/webrtc/trunk/webrtc/modules/audio_coding/codecs/g711/g711.gypi
media/webrtc/trunk/webrtc/modules/audio_coding/codecs/g722/g722.gypi
media/webrtc/trunk/webrtc/modules/audio_coding/codecs/ilbc/ilbc.gypi
media/webrtc/trunk/webrtc/modules/audio_coding/codecs/interfaces.gypi
media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/isac.gypi
media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/isac_common.gypi
media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/isac_test.gypi
media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/isacfix.gypi
media/webrtc/trunk/webrtc/modules/audio_coding/codecs/isac/isacfix_test.gypi
media/webrtc/trunk/webrtc/modules/audio_coding/codecs/opus/opus.gypi
media/webrtc/trunk/webrtc/modules/audio_coding/codecs/opus/opus_interface.c
media/webrtc/trunk/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.gypi
media/webrtc/trunk/webrtc/modules/audio_coding/codecs/red/red.gypi
media/webrtc/trunk/webrtc/modules/audio_coding/neteq/dtmf_buffer.cc
media/webrtc/trunk/webrtc/modules/audio_coding/neteq/dtmf_tone_generator.cc
media/webrtc/trunk/webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h
media/webrtc/trunk/webrtc/modules/audio_coding/neteq/neteq.gypi
media/webrtc/trunk/webrtc/modules/audio_coding/neteq/neteq_tests.gypi
media/webrtc/trunk/webrtc/modules/audio_conference_mixer/audio_conference_mixer.gypi
media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_template.h
media/webrtc/trunk/webrtc/modules/audio_device/android/audio_manager.cc
media/webrtc/trunk/webrtc/modules/audio_device/android/audio_record_jni.cc
media/webrtc/trunk/webrtc/modules/audio_device/android/audio_record_jni.h
media/webrtc/trunk/webrtc/modules/audio_device/android/audio_track_jni.cc
media/webrtc/trunk/webrtc/modules/audio_device/android/audio_track_jni.h
media/webrtc/trunk/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/BuildInfo.java
media/webrtc/trunk/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java
media/webrtc/trunk/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java
media/webrtc/trunk/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java
media/webrtc/trunk/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
media/webrtc/trunk/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java
media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_player.cc
media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_player.h
media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_recorder.cc
media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_recorder.h
media/webrtc/trunk/webrtc/modules/audio_device/audio_device.gypi
media/webrtc/trunk/webrtc/modules/audio_device/audio_device_impl.cc
media/webrtc/trunk/webrtc/modules/audio_device/audio_device_impl.h
media/webrtc/trunk/webrtc/modules/audio_device/gonk/audio_manager.cc
media/webrtc/trunk/webrtc/modules/audio_device/gonk/audio_manager.h
media/webrtc/trunk/webrtc/modules/audio_device/include/audio_device.h
media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc
media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h
media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc
media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_pulse_linux.h
media/webrtc/trunk/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.cc
media/webrtc/trunk/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h
media/webrtc/trunk/webrtc/modules/audio_device/linux/pulseaudiosymboltable_linux.cc
media/webrtc/trunk/webrtc/modules/audio_device/mac/audio_device_mac.cc
media/webrtc/trunk/webrtc/modules/audio_device/opensl/single_rw_fifo.cc
media/webrtc/trunk/webrtc/modules/audio_device/opensl/single_rw_fifo.h
media/webrtc/trunk/webrtc/modules/audio_device/sndio/audio_device_sndio.cc
media/webrtc/trunk/webrtc/modules/audio_device/sndio/audio_device_sndio.h
media/webrtc/trunk/webrtc/modules/audio_device/test/audio_device_test_api.cc
media/webrtc/trunk/webrtc/modules/audio_device/win/audio_device_core_win.cc
media/webrtc/trunk/webrtc/modules/audio_mixer/audio_mixer.gypi
media/webrtc/trunk/webrtc/modules/audio_processing/audio_processing.gypi
media/webrtc/trunk/webrtc/modules/audio_processing/audio_processing_tests.gypi
media/webrtc/trunk/webrtc/modules/audio_processing/beamformer/covariance_matrix_generator.cc
media/webrtc/trunk/webrtc/modules/audio_processing/beamformer/nonlinear_beamformer.cc
media/webrtc/trunk/webrtc/modules/audio_processing/beamformer/nonlinear_beamformer.h
media/webrtc/trunk/webrtc/modules/audio_processing/include/audio_processing.h
media/webrtc/trunk/webrtc/modules/audio_processing/include/config.h
media/webrtc/trunk/webrtc/modules/audio_processing/logging/apm_data_dumper.cc
media/webrtc/trunk/webrtc/modules/audio_processing/logging/apm_data_dumper.h
media/webrtc/trunk/webrtc/modules/audio_processing/three_band_filter_bank.cc
media/webrtc/trunk/webrtc/modules/audio_processing/utility/delay_estimator.cc
media/webrtc/trunk/webrtc/modules/bitrate_controller/bitrate_controller.gypi
media/webrtc/trunk/webrtc/modules/congestion_controller/congestion_controller.gypi
media/webrtc/trunk/webrtc/modules/desktop_capture/app_capturer.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/app_capturer.h
media/webrtc/trunk/webrtc/modules/desktop_capture/app_capturer_mac.mm
media/webrtc/trunk/webrtc/modules/desktop_capture/app_capturer_null.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/app_capturer_unittest.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/app_capturer_win.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/app_capturer_x11.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/cropping_window_capturer.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/cropping_window_capturer.h
media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h
media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_capture.gypi
media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_capture_types.h
media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_capturer.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_capturer.h
media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_capturer_differ_wrapper.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_capturer_differ_wrapper.h
media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_device_info.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_device_info.h
media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_device_info_null.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_frame_rotation.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/differ_block.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/mac/desktop_device_info_mac.h
media/webrtc/trunk/webrtc/modules/desktop_capture/mac/desktop_device_info_mac.mm
media/webrtc/trunk/webrtc/modules/desktop_capture/mac/window_list_utils.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor_monitor.h
media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor_monitor_mac.mm
media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor_monitor_unittest.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor_monitor_win.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/mouse_cursor_monitor_x11.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer_mac.mm
media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer_win.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/screen_capturer_x11.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/win/desktop_device_info_win.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/win/desktop_device_info_win.h
media/webrtc/trunk/webrtc/modules/desktop_capture/win/screen_capture_utils.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/win/screen_capturer_win_directx.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/win/screen_capturer_win_directx.h
media/webrtc/trunk/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.h
media/webrtc/trunk/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.h
media/webrtc/trunk/webrtc/modules/desktop_capture/win/win_shared.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/win/win_shared.h
media/webrtc/trunk/webrtc/modules/desktop_capture/win/window_capture_utils.h
media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer_mac.mm
media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer_null.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer_win.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/window_capturer_x11.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/x11/desktop_device_info_x11.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/x11/desktop_device_info_x11.h
media/webrtc/trunk/webrtc/modules/desktop_capture/x11/shared_x_util.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/x11/shared_x_util.h
media/webrtc/trunk/webrtc/modules/desktop_capture/x11/x_error_trap.cc
media/webrtc/trunk/webrtc/modules/desktop_capture/x11/x_error_trap.h
media/webrtc/trunk/webrtc/modules/include/module_common_types.h
media/webrtc/trunk/webrtc/modules/media_file/media_file.gypi
media/webrtc/trunk/webrtc/modules/media_file/media_file_utility.cc
media/webrtc/trunk/webrtc/modules/modules.gyp
media/webrtc/trunk/webrtc/modules/pacing/pacing.gypi
media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi
media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc
media/webrtc/trunk/webrtc/modules/rtp_rtcp/include/rtp_receiver.h
media/webrtc/trunk/webrtc/modules/rtp_rtcp/include/rtp_rtcp.h
media/webrtc/trunk/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h
media/webrtc/trunk/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
media/webrtc/trunk/webrtc/modules/rtp_rtcp/rtp_rtcp.gypi
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/flexfec_receiver_unittest.cc
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_packet/report_block.h
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_packet/sdes_unittest.cc
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtcp_sender.h
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_format_h264.cc
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_header_extension.cc
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_header_extension.h
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_packet.h
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.cc
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.h
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_rtcp_config.h
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_sender.cc
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_sender.h
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/ssrc_database.h
media/webrtc/trunk/webrtc/modules/rtp_rtcp/test/testFec/test_fec.gypi
media/webrtc/trunk/webrtc/modules/utility/source/jvm_android.cc
media/webrtc/trunk/webrtc/modules/utility/source/process_thread_impl.cc
media/webrtc/trunk/webrtc/modules/utility/utility.gypi
media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.cc
media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.h
media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/CaptureCapabilityAndroid.java
media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java
media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.cc
media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.h
media/webrtc/trunk/webrtc/modules/video_capture/device_info_impl.cc
media/webrtc/trunk/webrtc/modules/video_capture/device_info_impl.h
media/webrtc/trunk/webrtc/modules/video_capture/linux/device_info_linux.cc
media/webrtc/trunk/webrtc/modules/video_capture/linux/device_info_linux.h
media/webrtc/trunk/webrtc/modules/video_capture/linux/video_capture_linux.cc
media/webrtc/trunk/webrtc/modules/video_capture/mac/avfoundation/video_capture_avfoundation.h
media/webrtc/trunk/webrtc/modules/video_capture/mac/avfoundation/video_capture_avfoundation.mm
media/webrtc/trunk/webrtc/modules/video_capture/mac/avfoundation/video_capture_avfoundation_info.h
media/webrtc/trunk/webrtc/modules/video_capture/mac/avfoundation/video_capture_avfoundation_info.mm
media/webrtc/trunk/webrtc/modules/video_capture/mac/avfoundation/video_capture_avfoundation_info_objc.h
media/webrtc/trunk/webrtc/modules/video_capture/mac/avfoundation/video_capture_avfoundation_info_objc.mm
media/webrtc/trunk/webrtc/modules/video_capture/mac/avfoundation/video_capture_avfoundation_objc.h
media/webrtc/trunk/webrtc/modules/video_capture/mac/avfoundation/video_capture_avfoundation_objc.mm
media/webrtc/trunk/webrtc/modules/video_capture/mac/avfoundation/video_capture_avfoundation_utility.h
media/webrtc/trunk/webrtc/modules/video_capture/objc/device_info.h
media/webrtc/trunk/webrtc/modules/video_capture/objc/device_info.mm
media/webrtc/trunk/webrtc/modules/video_capture/objc/rtc_video_capture_objc.mm
media/webrtc/trunk/webrtc/modules/video_capture/test/video_capture_unittest.cc
media/webrtc/trunk/webrtc/modules/video_capture/video_capture.gypi
media/webrtc/trunk/webrtc/modules/video_capture/video_capture.h
media/webrtc/trunk/webrtc/modules/video_capture/video_capture_defines.h
media/webrtc/trunk/webrtc/modules/video_capture/video_capture_factory.cc
media/webrtc/trunk/webrtc/modules/video_capture/video_capture_impl.cc
media/webrtc/trunk/webrtc/modules/video_capture/windows/BasePin.cpp
media/webrtc/trunk/webrtc/modules/video_capture/windows/device_info_ds.cc
media/webrtc/trunk/webrtc/modules/video_capture/windows/device_info_ds.h
media/webrtc/trunk/webrtc/modules/video_capture/windows/device_info_mf.cc
media/webrtc/trunk/webrtc/modules/video_capture/windows/device_info_mf.h
media/webrtc/trunk/webrtc/modules/video_capture/windows/sink_filter_ds.cc
media/webrtc/trunk/webrtc/modules/video_capture/windows/sink_filter_ds.h
media/webrtc/trunk/webrtc/modules/video_capture/windows/video_capture_ds.cc
media/webrtc/trunk/webrtc/modules/video_capture/windows/video_capture_ds.h
media/webrtc/trunk/webrtc/modules/video_capture/windows/video_capture_factory_windows.cc
media/webrtc/trunk/webrtc/modules/video_capture/windows/video_capture_mf.cc
media/webrtc/trunk/webrtc/modules/video_coding/codec_database.cc
media/webrtc/trunk/webrtc/modules/video_coding/codecs/h264/h264.gypi
media/webrtc/trunk/webrtc/modules/video_coding/codecs/i420/i420.gypi
media/webrtc/trunk/webrtc/modules/video_coding/codecs/interface/video_codec_interface.h
media/webrtc/trunk/webrtc/modules/video_coding/codecs/test/video_codecs_test_framework.gypi
media/webrtc/trunk/webrtc/modules/video_coding/codecs/tools/video_codecs_tools.gypi
media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/vp8.gyp
media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp9/vp9.gyp
media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h
media/webrtc/trunk/webrtc/modules/video_coding/decoding_state.cc
media/webrtc/trunk/webrtc/modules/video_coding/frame_buffer.cc
media/webrtc/trunk/webrtc/modules/video_coding/frame_buffer2.cc
media/webrtc/trunk/webrtc/modules/video_coding/frame_buffer2.h
media/webrtc/trunk/webrtc/modules/video_coding/frame_buffer2_unittest.cc
media/webrtc/trunk/webrtc/modules/video_coding/generic_decoder.h
media/webrtc/trunk/webrtc/modules/video_coding/generic_encoder.cc
media/webrtc/trunk/webrtc/modules/video_coding/h264_sprop_parameter_sets_unittest.cc
media/webrtc/trunk/webrtc/modules/video_coding/include/video_coding.h
media/webrtc/trunk/webrtc/modules/video_coding/include/video_coding_defines.h
media/webrtc/trunk/webrtc/modules/video_coding/jitter_buffer.cc
media/webrtc/trunk/webrtc/modules/video_coding/jitter_buffer.h
media/webrtc/trunk/webrtc/modules/video_coding/jitter_buffer_common.h
media/webrtc/trunk/webrtc/modules/video_coding/media_optimization.cc
media/webrtc/trunk/webrtc/modules/video_coding/media_optimization.h
media/webrtc/trunk/webrtc/modules/video_coding/packet.cc
media/webrtc/trunk/webrtc/modules/video_coding/receiver.cc
media/webrtc/trunk/webrtc/modules/video_coding/receiver.h
media/webrtc/trunk/webrtc/modules/video_coding/rtt_filter.cc
media/webrtc/trunk/webrtc/modules/video_coding/session_info.cc
media/webrtc/trunk/webrtc/modules/video_coding/utility/video_coding_utility.gyp
media/webrtc/trunk/webrtc/modules/video_coding/video_codec_initializer.cc
media/webrtc/trunk/webrtc/modules/video_coding/video_coding.gypi
media/webrtc/trunk/webrtc/modules/video_coding/video_coding_impl.cc
media/webrtc/trunk/webrtc/modules/video_coding/video_coding_impl.h
media/webrtc/trunk/webrtc/modules/video_coding/video_coding_test.gypi
media/webrtc/trunk/webrtc/modules/video_coding/video_receiver.cc
media/webrtc/trunk/webrtc/modules/video_coding/video_sender.cc
media/webrtc/trunk/webrtc/modules/video_processing/video_processing.gypi
media/webrtc/trunk/webrtc/p2p/p2p.gyp
media/webrtc/trunk/webrtc/pc/pc.gyp
media/webrtc/trunk/webrtc/sdk/sdk.gyp
media/webrtc/trunk/webrtc/sdk/sdk.gypi
media/webrtc/trunk/webrtc/stats/stats.gyp
media/webrtc/trunk/webrtc/supplement.gypi
media/webrtc/trunk/webrtc/system_wrappers/cpu_features_chromium.gyp
media/webrtc/trunk/webrtc/system_wrappers/cpu_features_webrtc.gyp
media/webrtc/trunk/webrtc/system_wrappers/include/asm_defines.h
media/webrtc/trunk/webrtc/system_wrappers/include/static_instance.h
media/webrtc/trunk/webrtc/system_wrappers/include/trace.h
media/webrtc/trunk/webrtc/system_wrappers/source/atomic32_non_darwin_unix.cc
media/webrtc/trunk/webrtc/system_wrappers/source/clock.cc
media/webrtc/trunk/webrtc/system_wrappers/source/cpu_features.cc
media/webrtc/trunk/webrtc/system_wrappers/source/cpu_features_android.c
media/webrtc/trunk/webrtc/system_wrappers/source/droid-cpu-features.c
media/webrtc/trunk/webrtc/system_wrappers/source/droid-cpu-features.h
media/webrtc/trunk/webrtc/system_wrappers/source/rw_lock_win.cc
media/webrtc/trunk/webrtc/system_wrappers/source/trace_impl.cc
media/webrtc/trunk/webrtc/system_wrappers/source/trace_impl.h
media/webrtc/trunk/webrtc/system_wrappers/source/trace_posix.cc
media/webrtc/trunk/webrtc/system_wrappers/system_wrappers.gyp
media/webrtc/trunk/webrtc/test/mock_voice_engine.h
media/webrtc/trunk/webrtc/test/test.gyp
media/webrtc/trunk/webrtc/tools/internal_tools.gyp
media/webrtc/trunk/webrtc/tools/loopback_test/adapter.js
media/webrtc/trunk/webrtc/tools/rtcbot/botmanager.js
media/webrtc/trunk/webrtc/tools/rtcbot/test.js
media/webrtc/trunk/webrtc/tools/tools.gyp
media/webrtc/trunk/webrtc/typedefs.h
media/webrtc/trunk/webrtc/video/receive_statistics_proxy.cc
media/webrtc/trunk/webrtc/video/receive_statistics_proxy.h
media/webrtc/trunk/webrtc/video/rtp_stream_receiver.cc
media/webrtc/trunk/webrtc/video/rtp_stream_receiver.h
media/webrtc/trunk/webrtc/video/rtp_streams_synchronizer.cc
media/webrtc/trunk/webrtc/video/video_receive_stream.cc
media/webrtc/trunk/webrtc/video/video_receive_stream.h
media/webrtc/trunk/webrtc/video/video_send_stream.cc
media/webrtc/trunk/webrtc/video/video_send_stream.h
media/webrtc/trunk/webrtc/video/video_stream_decoder.cc
media/webrtc/trunk/webrtc/video/video_stream_decoder.h
media/webrtc/trunk/webrtc/video/vie_encoder.cc
media/webrtc/trunk/webrtc/video/vie_encoder.h
media/webrtc/trunk/webrtc/video/webrtc_video.gypi
media/webrtc/trunk/webrtc/video_encoder.h
media/webrtc/trunk/webrtc/video_engine/browser_capture_impl.h
media/webrtc/trunk/webrtc/video_engine/desktop_capture_impl.cc
media/webrtc/trunk/webrtc/video_engine/desktop_capture_impl.h
media/webrtc/trunk/webrtc/video_receive_stream.h
media/webrtc/trunk/webrtc/video_send_stream.h
media/webrtc/trunk/webrtc/voice_engine/channel.cc
media/webrtc/trunk/webrtc/voice_engine/channel.h
media/webrtc/trunk/webrtc/voice_engine/channel_proxy.cc
media/webrtc/trunk/webrtc/voice_engine/channel_proxy.h
media/webrtc/trunk/webrtc/voice_engine/file_player.cc
media/webrtc/trunk/webrtc/voice_engine/include/voe_base.h
media/webrtc/trunk/webrtc/voice_engine/include/voe_external_media.h
media/webrtc/trunk/webrtc/voice_engine/include/voe_hardware.h
media/webrtc/trunk/webrtc/voice_engine/include/voe_rtp_rtcp.h
media/webrtc/trunk/webrtc/voice_engine/include/voe_video_sync.h
media/webrtc/trunk/webrtc/voice_engine/include/voe_volume_control.h
media/webrtc/trunk/webrtc/voice_engine/output_mixer.cc
media/webrtc/trunk/webrtc/voice_engine/output_mixer.h
media/webrtc/trunk/webrtc/voice_engine/shared_data.cc
media/webrtc/trunk/webrtc/voice_engine/shared_data.h
media/webrtc/trunk/webrtc/voice_engine/test/auto_test/standard/external_media_test.cc
media/webrtc/trunk/webrtc/voice_engine/test/auto_test/standard/hardware_before_streaming_test.cc
media/webrtc/trunk/webrtc/voice_engine/test/auto_test/standard/hardware_test.cc
media/webrtc/trunk/webrtc/voice_engine/test/auto_test/standard/video_sync_test.cc
media/webrtc/trunk/webrtc/voice_engine/test/channel_transport/udp_transport_impl.cc
media/webrtc/trunk/webrtc/voice_engine/transmit_mixer.cc
media/webrtc/trunk/webrtc/voice_engine/utility_unittest.cc
media/webrtc/trunk/webrtc/voice_engine/voe_base_impl.cc
media/webrtc/trunk/webrtc/voice_engine/voe_external_media_impl.cc
media/webrtc/trunk/webrtc/voice_engine/voe_external_media_impl.h
media/webrtc/trunk/webrtc/voice_engine/voe_hardware_impl.cc
media/webrtc/trunk/webrtc/voice_engine/voe_hardware_impl.h
media/webrtc/trunk/webrtc/voice_engine/voe_rtp_rtcp_impl.cc
media/webrtc/trunk/webrtc/voice_engine/voe_rtp_rtcp_impl.h
media/webrtc/trunk/webrtc/voice_engine/voe_video_sync_impl.cc
media/webrtc/trunk/webrtc/voice_engine/voe_video_sync_impl.h
media/webrtc/trunk/webrtc/voice_engine/voice_engine.gyp
media/webrtc/trunk/webrtc/voice_engine/voice_engine_defines.h
media/webrtc/trunk/webrtc/webrtc.gyp
media/webrtc/trunk/webrtc/webrtc_examples.gyp
media/webrtc/trunk/webrtc/webrtc_tests.gypi
mobile/android/app/build.gradle
mobile/android/base/moz.build
modules/libpref/init/all.js
--- a/CLOBBER
+++ b/CLOBBER
@@ -17,10 +17,9 @@
 #
 # Modifying this file will now automatically clobber the buildbot machines \o/
 #
 
 # Are you updating CLOBBER because you think it's needed for your WebIDL
 # changes to stick? As of bug 928195, this shouldn't be necessary! Please
 # don't change CLOBBER for WebIDL changes any more.
 
-
-Merge day clobber
\ No newline at end of file
+Bug 1341285: From experience, the many build changes in a webrtc.org landing usually make a clobber necessary
--- a/build/gyp.mozbuild
+++ b/build/gyp.mozbuild
@@ -6,16 +6,19 @@
 
 include('gyp_base.mozbuild')
 
 gyp_vars.update({
     'lsan': 0,
     'asan': 0,
     'build_with_mozilla': 1,
     'build_with_chromium': 0,
+    # 10.9 once we move to TC cross-compiles - bug 1270217
+    'mac_sdk_min': '10.7',
+    'mac_deployment_target': '10.7',
     'use_official_google_api_keys': 0,
     'have_clock_monotonic': 1 if CONFIG['HAVE_CLOCK_MONOTONIC'] else 0,
     'have_ethtool_cmd_speed_hi': 1 if CONFIG['MOZ_WEBRTC_HAVE_ETHTOOL_SPEED_HI'] else 0,
     'include_alsa_audio': 1 if CONFIG['MOZ_ALSA'] else 0,
     'include_pulse_audio': 1 if CONFIG['MOZ_PULSEAUDIO'] else 0,
     # basic stuff for everything
     'include_internal_video_render': 0,
     'clang': 1 if CONFIG['CLANG_CXX'] else 0,
@@ -26,24 +29,28 @@ gyp_vars.update({
     'enable_android_opensl_output': 0,
     # use_system_lib* still seems to be in use in trunk/build
     'use_system_libjpeg': 0,
     'use_system_libvpx': 0,
     'build_json': 0,
     'build_libjpeg': 0,
     'build_libyuv': 0,
     'build_libvpx': 0,
+    'build_libevent': 0,
     'build_ssl': 0,
     'build_json': 0,
     'build_icu': 0,
     'build_opus': 0,
     'libyuv_dir': '/media/libyuv/libyuv',
+    'libevent_dir': '/ipc/chromium/src/third_party/libevent',
     'yuv_disable_avx2': 0 if CONFIG['HAVE_X86_AVX2'] else 1,
     # don't use openssl
     'use_openssl': 0,
+    # Must match build/gyp.mozbuild WEBRTC_BUILD_LIBEVENT
+    #'enable_libevent': 0, default according to OS
 
     'debug': 1 if CONFIG['DEBUG'] else 0,
 
     'use_x11': 1 if CONFIG['MOZ_X11'] else 0,
     'use_glib': 1 if CONFIG['GLIB_LIBS'] else 0,
 
      # turn off mandatory use of NEON and instead use NEON detection
     'arm_neon': 0,
@@ -69,31 +76,35 @@ gyp_vars.update({
     'include_g711': 1,
     'include_opus': 1,
     'include_g722': 1,
     'include_ilbc': 0,
     # We turn on ISAC because the AGC uses parts of it, and depend on the
     # linker to throw away uneeded bits.
     'include_isac': 1,
     'include_pcm16b': 1,
+
+    #'rtc_opus_variable_complexity': 1,
 })
 
 if os == 'Android':
     gyp_vars.update(
         gtest_target_type='executable',
         moz_webrtc_mediacodec=1,
         android_toolchain=CONFIG.get('ANDROID_TOOLCHAIN', ''),
     )
 
 if CONFIG['ARM_ARCH']:
     if int(CONFIG['ARM_ARCH']) < 7:
         gyp_vars['armv7'] = 0
         gyp_vars['arm_neon_optional'] = 0
     elif os == 'Android':
         gyp_vars['armv7'] = 1
+        gyp_vars['arm_neon'] = 1
+        gyp_vars['build_with_neon'] = 1
     else:
         # CPU detection for ARM works on Android only.  armv7 always uses CPU
         # detection, so we have to set armv7=0 for non-Android target
         gyp_vars['armv7'] = 0
     # For libyuv
     gyp_vars['arm_version'] = int(CONFIG['ARM_ARCH'])
 
 # Don't try to compile ssse3/sse4.1 code if toolchain doesn't support
--- a/dom/media/gmp-plugin-openh264/gmp-fake-openh264.cpp
+++ b/dom/media/gmp-plugin-openh264/gmp-fake-openh264.cpp
@@ -51,48 +51,50 @@
 #include "gmp-video-frame-encoded.h"
 
 #if defined(GMP_FAKE_SUPPORT_DECRYPT)
 #include "gmp-decryption.h"
 #include "gmp-test-decryptor.h"
 #include "gmp-test-storage.h"
 #endif
 
+#include "mozilla/PodOperations.h"
+
 #if defined(_MSC_VER)
 #define PUBLIC_FUNC __declspec(dllexport)
 #else
 #define PUBLIC_FUNC
 #endif
 
 #define BIG_FRAME 10000
 
-static int g_log_level = 0;
-
-#define GMPLOG(l, x) do { \
-        if (l <= g_log_level) { \
-        const char *log_string = "unknown"; \
-        if ((l >= 0) && (l <= 3)) {               \
-        log_string = kLogStrings[l];            \
-        } \
-        std::cerr << log_string << ": " << x << std::endl; \
-        } \
-    } while(0)
-
 #define GL_CRIT 0
 #define GL_ERROR 1
 #define GL_INFO  2
 #define GL_DEBUG 3
 
 const char* kLogStrings[] = {
   "Critical",
   "Error",
   "Info",
   "Debug"
 };
 
+static int g_log_level = GL_CRIT;
+
+#define GMPLOG(l, x) do {                                  \
+        if (l <= g_log_level) {                            \
+        const char *log_string = "unknown";                \
+        if ((l >= 0) && (l <= 3)) {                        \
+        log_string = kLogStrings[l];                       \
+        }                                                  \
+        std::cerr << log_string << ": " << x << std::endl; \
+        }                                                  \
+    } while(0)
+
 
 GMPPlatformAPI* g_platform_api = NULL;
 
 class FakeVideoEncoder;
 class FakeVideoDecoder;
 
 struct EncodedFrame {
   uint32_t length_;
@@ -103,16 +105,38 @@ struct EncodedFrame {
   uint8_t y_;
   uint8_t u_;
   uint8_t v_;
   uint32_t timestamp_;
 };
 
 #define ENCODED_FRAME_MAGIC 0x4652414d
 
+template <typename T> class SelfDestruct {
+ public:
+  explicit SelfDestruct (T* t) : t_ (t) {}
+  ~SelfDestruct() {
+    if (t_) {
+      t_->Destroy();
+    }
+  }
+
+#if 0 // unused
+  T* forget() {
+    T* t = t_;
+    t_ = nullptr;
+
+    return t;
+  }
+#endif
+
+ private:
+  T* t_;
+};
+
 class FakeEncoderTask : public GMPTask {
  public:
   FakeEncoderTask(FakeVideoEncoder* encoder,
                   GMPVideoi420Frame* frame,
                   GMPVideoFrameType type)
       : encoder_(encoder), frame_(frame), type_(type) {}
 
   void Run() override;
@@ -122,29 +146,109 @@ class FakeEncoderTask : public GMPTask {
   GMPVideoi420Frame* frame_;
   GMPVideoFrameType type_;
 };
 
 class FakeVideoEncoder : public GMPVideoEncoder {
  public:
   explicit FakeVideoEncoder (GMPVideoHost* hostAPI) :
     host_ (hostAPI),
-    callback_ (NULL) {}
+    callback_ (NULL),
+    frames_encoded_(0) {}
 
   void InitEncode (const GMPVideoCodec& codecSettings,
                    const uint8_t* aCodecSpecific,
                    uint32_t aCodecSpecificSize,
                    GMPVideoEncoderCallback* callback,
                    int32_t numberOfCores,
                    uint32_t maxPayloadSize) override {
     callback_ = callback;
 
+    const char *env = getenv("GMP_LOGGING");
+    if (env) {
+      g_log_level = atoi(env);
+    }
     GMPLOG (GL_INFO, "Initialized encoder");
   }
 
+  void SendFrame(GMPVideoi420Frame* inputImage,
+                 GMPVideoFrameType frame_type,
+                 uint8_t nal_type)
+  {
+    // Encode this in a frame that looks a little bit like H.264.
+    // Send SPS/PPS/IDR to avoid confusing people
+    // Copy the data. This really should convert this to network byte order.
+    EncodedFrame eframe;
+    eframe.length_ = sizeof(eframe) - sizeof(uint32_t);
+    eframe.h264_compat_ = nal_type; // 7 = SPS, 8 = PPS, 5 = IFrame/IDR slice, 1=PFrame/slice
+    eframe.magic_ = ENCODED_FRAME_MAGIC;
+    eframe.width_ = inputImage->Width();
+    eframe.height_ = inputImage->Height();
+    eframe.y_ = AveragePlane(inputImage->Buffer(kGMPYPlane),
+                             inputImage->AllocatedSize(kGMPYPlane));
+    eframe.u_ = AveragePlane(inputImage->Buffer(kGMPUPlane),
+                             inputImage->AllocatedSize(kGMPUPlane));
+    eframe.v_ = AveragePlane(inputImage->Buffer(kGMPVPlane),
+                             inputImage->AllocatedSize(kGMPVPlane));
+
+    eframe.timestamp_ = inputImage->Timestamp();
+
+    // Now return the encoded data back to the parent.
+    GMPVideoFrame* ftmp;
+    GMPErr err = host_->CreateFrame(kGMPEncodedVideoFrame, &ftmp);
+    if (err != GMPNoErr) {
+      GMPLOG (GL_ERROR, "Error creating encoded frame");
+      return;
+    }
+
+    GMPVideoEncodedFrame* f = static_cast<GMPVideoEncodedFrame*> (ftmp);
+
+    err = f->CreateEmptyFrame (sizeof(eframe) +
+                               (nal_type == 5 ? sizeof(uint32_t) + BIG_FRAME : 0));
+    if (err != GMPNoErr) {
+      GMPLOG (GL_ERROR, "Error allocating frame data");
+      f->Destroy();
+      return;
+    }
+    memcpy(f->Buffer(), &eframe, sizeof(eframe));
+    if (nal_type == 5) {
+      // set the size for the fake iframe
+      *((uint32_t*) (f->Buffer() + sizeof(eframe))) = BIG_FRAME;
+    }
+
+    f->SetEncodedWidth(eframe.width_);
+    f->SetEncodedHeight(eframe.height_);
+    f->SetTimeStamp(eframe.timestamp_);
+    f->SetFrameType(frame_type);
+    f->SetCompleteFrame(true);
+    f->SetBufferType(GMP_BufferLength32);
+
+    GMPLOG (GL_DEBUG, "Encoding complete. type= "
+            << f->FrameType()
+            << " NAL_type="
+            << (int) eframe.h264_compat_
+            << " length="
+            << f->Size()
+            << " timestamp="
+            << f->TimeStamp()
+            << " width/height="
+            << eframe.width_
+            << "x" << eframe.height_);
+
+    // Return the encoded frame.
+    GMPCodecSpecificInfo info;
+    mozilla::PodZero(&info);
+    info.mCodecType = kGMPVideoCodecH264;
+    info.mBufferType = GMP_BufferLength32;
+    info.mCodecSpecific.mH264.mSimulcastIdx = 0;
+    GMPLOG (GL_DEBUG, "Calling callback");
+    callback_->Encoded (f, reinterpret_cast<uint8_t*> (&info), sizeof(info));
+    GMPLOG (GL_DEBUG, "Callback called");
+  }
+
   void Encode (GMPVideoi420Frame* inputImage,
                const uint8_t* aCodecSpecificInfo,
                uint32_t aCodecSpecificInfoLength,
                const GMPVideoFrameType* aFrameTypes,
                uint32_t aFrameTypesLength) override {
     GMPLOG (GL_DEBUG,
             __FUNCTION__
             << " size="
@@ -154,89 +258,35 @@ class FakeVideoEncoder : public GMPVideo
 
     g_platform_api->runonmainthread(new FakeEncoderTask(this,
                                                         inputImage,
                                                         aFrameTypes[0]));
   }
 
   void Encode_m (GMPVideoi420Frame* inputImage,
                  GMPVideoFrameType frame_type) {
+    SelfDestruct<GMPVideoi420Frame> ifd (inputImage);
+
     if (frame_type  == kGMPKeyFrame) {
       if (!inputImage)
         return;
     }
     if (!inputImage) {
       GMPLOG (GL_ERROR, "no input image");
       return;
     }
 
-    // Now return the encoded data back to the parent.
-    GMPVideoFrame* ftmp;
-    GMPErr err = host_->CreateFrame(kGMPEncodedVideoFrame, &ftmp);
-    if (err != GMPNoErr) {
-      GMPLOG (GL_ERROR, "Error creating encoded frame");
-      return;
+    if (frame_type  == kGMPKeyFrame ||
+        frames_encoded_++ % 10 == 0) { // periodically send iframes anyway
+      SendFrame(inputImage, kGMPKeyFrame, 7); // 7 = SPS, 8 = PPS, 5 = IFrame/IDR slice, 1=PFrame/slice
+      SendFrame(inputImage, kGMPKeyFrame, 8);
+      SendFrame(inputImage, kGMPKeyFrame, 5);
+    } else {
+      SendFrame(inputImage, frame_type, 1);
     }
-
-    GMPVideoEncodedFrame* f = static_cast<GMPVideoEncodedFrame*> (ftmp);
-
-    // Encode this in a frame that looks a little bit like H.264.
-    // Note that we don't do PPS or SPS.
-    // Copy the data. This really should convert this to network byte order.
-    EncodedFrame eframe;
-    eframe.length_ = sizeof(eframe) - sizeof(uint32_t);
-    eframe.h264_compat_ = 5; // Emulate a H.264 IDR NAL.
-    eframe.magic_ = ENCODED_FRAME_MAGIC;
-    eframe.width_ = inputImage->Width();
-    eframe.height_ = inputImage->Height();
-    eframe.y_ = AveragePlane(inputImage->Buffer(kGMPYPlane),
-                             inputImage->AllocatedSize(kGMPYPlane));
-    eframe.u_ = AveragePlane(inputImage->Buffer(kGMPUPlane),
-                             inputImage->AllocatedSize(kGMPUPlane));
-    eframe.v_ = AveragePlane(inputImage->Buffer(kGMPVPlane),
-                             inputImage->AllocatedSize(kGMPVPlane));
-
-    eframe.timestamp_ = inputImage->Timestamp();
-
-    err = f->CreateEmptyFrame (sizeof(eframe) +
-                               (frame_type  == kGMPKeyFrame ? sizeof(uint32_t) + BIG_FRAME : 0));
-    if (err != GMPNoErr) {
-      GMPLOG (GL_ERROR, "Error allocating frame data");
-      f->Destroy();
-      return;
-    }
-    memcpy(f->Buffer(), &eframe, sizeof(eframe));
-    if (frame_type  == kGMPKeyFrame) {
-      // set the size for the fake iframe
-      *((uint32_t*) (f->Buffer() + sizeof(eframe))) = BIG_FRAME;
-    }
-
-    f->SetEncodedWidth (inputImage->Width());
-    f->SetEncodedHeight (inputImage->Height());
-    f->SetTimeStamp (inputImage->Timestamp());
-    f->SetFrameType (frame_type);
-    f->SetCompleteFrame (true);
-    f->SetBufferType(GMP_BufferLength32);
-
-    GMPLOG (GL_DEBUG, "Encoding complete. type= "
-            << f->FrameType()
-            << " length="
-            << f->Size()
-            << " timestamp="
-            << f->TimeStamp());
-
-    // Return the encoded frame.
-    GMPCodecSpecificInfo info;
-    memset (&info, 0, sizeof (info));
-    info.mCodecType = kGMPVideoCodecH264;
-    info.mBufferType = GMP_BufferLength32;
-    info.mCodecSpecific.mH264.mSimulcastIdx = 0;
-    GMPLOG (GL_DEBUG, "Calling callback");
-    callback_->Encoded (f, reinterpret_cast<uint8_t*> (&info), sizeof(info));
-    GMPLOG (GL_DEBUG, "Callback called");
   }
 
   void SetChannelParameters (uint32_t aPacketLoss, uint32_t aRTT) override {
   }
 
   void SetRates (uint32_t aNewBitRate, uint32_t aFrameRate) override {
   }
 
@@ -255,21 +305,22 @@ class FakeVideoEncoder : public GMPVideo
       val += ptr[i];
     }
 
     return (val / len) % 0xff;
   }
 
   GMPVideoHost* host_;
   GMPVideoEncoderCallback* callback_;
+  uint32_t frames_encoded_;
 };
 
 void FakeEncoderTask::Run() {
   encoder_->Encode_m(frame_, type_);
-  frame_->Destroy();
+  frame_ = nullptr; // Encode_m() destroys the frame
 }
 
 class FakeDecoderTask : public GMPTask {
  public:
   FakeDecoderTask(FakeVideoDecoder* decoder,
                   GMPVideoEncodedFrame* frame,
                   int64_t time)
       : decoder_(decoder), frame_(frame), time_(time) {}
@@ -292,16 +343,20 @@ class FakeVideoDecoder : public GMPVideo
 
   void InitDecode (const GMPVideoCodec& codecSettings,
                    const uint8_t* aCodecSpecific,
                    uint32_t aCodecSpecificSize,
                    GMPVideoDecoderCallback* callback,
                    int32_t coreCount) override {
     GMPLOG (GL_INFO, "InitDecode");
 
+    const char *env = getenv("GMP_LOGGING");
+    if (env) {
+      g_log_level = atoi(env);
+    }
     callback_ = callback;
   }
 
   void Decode (GMPVideoEncodedFrame* inputFrame,
                bool missingFrames,
                const uint8_t* aCodecSpecificInfo,
                uint32_t aCodecSpecificInfoLength,
                int64_t renderTimeMs = -1) override {
@@ -319,27 +374,39 @@ class FakeVideoDecoder : public GMPVideo
 
   void DecodingComplete() override {
     delete this;
   }
 
   // Return the decoded data back to the parent.
   void Decode_m (GMPVideoEncodedFrame* inputFrame,
                  int64_t renderTimeMs) {
+    // Attach a self-destructor so that the input frame is destroyed on return.
+    SelfDestruct<GMPVideoEncodedFrame> ifd (inputFrame);
+
     EncodedFrame *eframe;
+    eframe = reinterpret_cast<EncodedFrame*>(inputFrame->Buffer());
+    GMPLOG(GL_DEBUG,"magic="  << eframe->magic_ << " h264_compat="  << (int) eframe->h264_compat_
+           << " width=" << eframe->width_ << " height=" << eframe->height_
+           << " timestamp=" << inputFrame->TimeStamp()
+           << " y/u/v=" << (int) eframe->y_ << ":" << (int) eframe->u_ << ":" << (int) eframe->v_);
     if (inputFrame->Size() != (sizeof(*eframe))) {
       GMPLOG (GL_ERROR, "Couldn't decode frame. Size=" << inputFrame->Size());
       return;
     }
-    eframe = reinterpret_cast<EncodedFrame*>(inputFrame->Buffer());
 
     if (eframe->magic_ != ENCODED_FRAME_MAGIC) {
       GMPLOG (GL_ERROR, "Couldn't decode frame. Magic=" << eframe->magic_);
       return;
     }
+    if (eframe->h264_compat_ != 5 && eframe->h264_compat_ != 1) {
+      // NAL types other than IDR slice (5) and P-slice (1) carry no picture
+      // payload; log it, but fall through and decode it anyway.
+      GMPLOG (GL_DEBUG, "Not a video frame: NAL type " << (int) eframe->h264_compat_);
+    }
 
     int width = eframe->width_;
     int height = eframe->height_;
     int ystride = eframe->width_;
     int uvstride = eframe->width_/2;
 
     GMPLOG (GL_DEBUG, "Video frame ready for display "
             << width
@@ -376,26 +443,25 @@ class FakeVideoDecoder : public GMPVideo
            eframe->v_,
            frame->AllocatedSize(kGMPVPlane));
 
     GMPLOG (GL_DEBUG, "Allocated size = "
             << frame->AllocatedSize (kGMPYPlane));
     frame->SetTimestamp (inputFrame->TimeStamp());
     frame->SetDuration (inputFrame->Duration());
     callback_->Decoded (frame);
-
   }
 
   GMPVideoHost* host_;
   GMPVideoDecoderCallback* callback_;
 };
 
 void FakeDecoderTask::Run() {
   decoder_->Decode_m(frame_, time_);
-  frame_->Destroy();
+  frame_ = nullptr; // Decode_m() destroys the frame
 }
 
 extern "C" {
 
   PUBLIC_FUNC GMPErr
   GMPInit (GMPPlatformAPI* aPlatformAPI) {
     g_platform_api = aPlatformAPI;
     return GMPNoErr;
--- a/dom/media/gmp/GMPVideoEncoderParent.cpp
+++ b/dom/media/gmp/GMPVideoEncoderParent.cpp
@@ -53,28 +53,16 @@ GMPVideoEncoderParent::GMPVideoEncoderPa
   mShuttingDown(false),
   mActorDestroyed(false),
   mPlugin(aPlugin),
   mCallback(nullptr),
   mVideoHost(this),
   mPluginId(aPlugin->GetPluginId())
 {
   MOZ_ASSERT(mPlugin);
-
-  nsresult rv = NS_NewNamedThread("GMPEncoded", getter_AddRefs(mEncodedThread));
-  if (NS_FAILED(rv)) {
-    MOZ_CRASH();
-  }
-}
-
-GMPVideoEncoderParent::~GMPVideoEncoderParent()
-{
-  if (mEncodedThread) {
-    mEncodedThread->Shutdown();
-  }
 }
 
 GMPVideoHostImpl&
 GMPVideoEncoderParent::Host()
 {
   return mVideoHost;
 }
 
@@ -82,16 +70,17 @@ GMPVideoEncoderParent::Host()
 void
 GMPVideoEncoderParent::Close()
 {
   LOGD(("%s::%s: %p", __CLASS__, __FUNCTION__, this));
   MOZ_ASSERT(mPlugin->GMPEventTarget()->IsOnCurrentThread());
   // Consumer is done with us; we can shut down.  No more callbacks should
   // be made to mCallback.  Note: do this before Shutdown()!
   mCallback = nullptr;
+
   // Let Shutdown mark us as dead so it knows if we had been alive
 
   // In case this is the last reference
   RefPtr<GMPVideoEncoderParent> kungfudeathgrip(this);
   Release();
   Shutdown();
 }
 
@@ -104,16 +93,17 @@ GMPVideoEncoderParent::InitEncode(const 
 {
   LOGD(("%s::%s: %p", __CLASS__, __FUNCTION__, this));
   if (mIsOpen) {
     NS_WARNING("Trying to re-init an in-use GMP video encoder!");
     return GMPGenericErr;;
   }
 
   MOZ_ASSERT(mPlugin->GMPEventTarget()->IsOnCurrentThread());
+  MOZ_ASSERT(!mCallback);
 
   if (!aCallback) {
     return GMPGenericErr;
   }
   mCallback = aCallback;
 
   if (!SendInitEncode(aCodecSettings, aCodecSpecific, aNumberOfCores, aMaxPayloadSize)) {
     return GMPGenericErr;
@@ -233,83 +223,50 @@ GMPVideoEncoderParent::Shutdown()
   }
 
   mIsOpen = false;
   if (!mActorDestroyed) {
     Unused << SendEncodingComplete();
   }
 }
 
-static void
-ShutdownEncodedThread(nsCOMPtr<nsIThread>& aThread)
-{
-  aThread->Shutdown();
-}
-
 // Note: Keep this sync'd up with Shutdown
 void
 GMPVideoEncoderParent::ActorDestroy(ActorDestroyReason aWhy)
 {
   LOGD(("%s::%s: %p (%d)", __CLASS__, __FUNCTION__, this, (int) aWhy));
   mIsOpen = false;
   mActorDestroyed = true;
   if (mCallback) {
     // May call Close() (and Shutdown()) immediately or with a delay
     mCallback->Terminated();
     mCallback = nullptr;
   }
-  // Must be shut down before VideoEncoderDestroyed(), since this can recurse
-  // the GMPThread event loop.  See bug 1049501
-  if (mEncodedThread) {
-    nsCOMPtr<nsIRunnable> r = WrapRunnableNM(
-      &ShutdownEncodedThread, nsCOMPtr<nsIThread>(mEncodedThread));
-    SystemGroup::Dispatch("ShutdownEncodedThread", TaskCategory::Other, r.forget());
-    mEncodedThread = nullptr;
-  }
   if (mPlugin) {
     // Ignore any return code. It is OK for this to fail without killing the process.
     mPlugin->VideoEncoderDestroyed(this);
     mPlugin = nullptr;
   }
   mVideoHost.ActorDestroyed(); // same as DoneWithAPI
   MaybeDisconnect(aWhy == AbnormalShutdown);
 }
 
-static void
-EncodedCallback(GMPVideoEncoderCallbackProxy* aCallback,
-                GMPVideoEncodedFrame* aEncodedFrame,
-                nsTArray<uint8_t>* aCodecSpecificInfo,
-                nsCOMPtr<nsIThread> aThread)
-{
-  aCallback->Encoded(aEncodedFrame, *aCodecSpecificInfo);
-  delete aCodecSpecificInfo;
-  // Ugh.  Must destroy the frame on GMPThread.
-  // XXX add locks to the ShmemManager instead?
-  aThread->Dispatch(WrapRunnable(aEncodedFrame,
-                                &GMPVideoEncodedFrame::Destroy),
-                   NS_DISPATCH_NORMAL);
-}
-
 mozilla::ipc::IPCResult
 GMPVideoEncoderParent::RecvEncoded(const GMPVideoEncodedFrameData& aEncodedFrame,
                                    InfallibleTArray<uint8_t>&& aCodecSpecificInfo)
 {
   if (!mCallback) {
     return IPC_FAIL_NO_REASON(this);
   }
 
   auto f = new GMPVideoEncodedFrameImpl(aEncodedFrame, &mVideoHost);
-  nsTArray<uint8_t> *codecSpecificInfo = new nsTArray<uint8_t>;
-  codecSpecificInfo->AppendElements((uint8_t*)aCodecSpecificInfo.Elements(), aCodecSpecificInfo.Length());
-  nsCOMPtr<nsIThread> thread = NS_GetCurrentThread();
-
-  mEncodedThread->Dispatch(WrapRunnableNM(&EncodedCallback,
-                                          mCallback, f, codecSpecificInfo, thread),
-                           NS_DISPATCH_NORMAL);
-
+  // Deliver the encoded frame to the consumer synchronously on this thread;
+  // the callback is presumably safe to invoke from any thread (or more than one).
+  mCallback->Encoded(f, aCodecSpecificInfo);
+  f->Destroy();
   return IPC_OK();
 }
 
 mozilla::ipc::IPCResult
 GMPVideoEncoderParent::RecvError(const GMPErr& aError)
 {
   if (!mCallback) {
     return IPC_FAIL_NO_REASON(this);
--- a/dom/media/gmp/GMPVideoEncoderParent.h
+++ b/dom/media/gmp/GMPVideoEncoderParent.h
@@ -59,17 +59,17 @@ public:
 #endif
   }
   void Dealloc(Shmem& aMem) override
   {
     DeallocShmem(aMem);
   }
 
 private:
-  virtual ~GMPVideoEncoderParent();
+  virtual ~GMPVideoEncoderParent() {};
 
   // PGMPVideoEncoderParent
   void ActorDestroy(ActorDestroyReason aWhy) override;
   mozilla::ipc::IPCResult RecvEncoded(const GMPVideoEncodedFrameData& aEncodedFrame,
                                       InfallibleTArray<uint8_t>&& aCodecSpecificInfo) override;
   mozilla::ipc::IPCResult RecvError(const GMPErr& aError) override;
   mozilla::ipc::IPCResult RecvShutdown() override;
   mozilla::ipc::IPCResult RecvParentShmemForPool(Shmem&& aFrameBuffer) override;
@@ -78,16 +78,15 @@ private:
   mozilla::ipc::IPCResult Recv__delete__() override;
 
   bool mIsOpen;
   bool mShuttingDown;
   bool mActorDestroyed;
   RefPtr<GMPContentParent> mPlugin;
   GMPVideoEncoderCallbackProxy* mCallback;
   GMPVideoHostImpl mVideoHost;
-  nsCOMPtr<nsIThread> mEncodedThread;
   const uint32_t mPluginId;
 };
 
 } // namespace gmp
 } // namespace mozilla
 
 #endif // GMPVideoEncoderParent_h_
--- a/dom/media/systemservices/CamerasChild.h
+++ b/dom/media/systemservices/CamerasChild.h
@@ -14,22 +14,18 @@
 #include "mozilla/camera/PCamerasParent.h"
 #include "mozilla/media/DeviceChangeCallback.h"
 #include "mozilla/Mutex.h"
 #include "base/singleton.h"
 #include "nsCOMPtr.h"
 
 // conflicts with #include of scoped_ptr.h
 #undef FF
-#include "webrtc/common.h"
-#include "webrtc/video_renderer.h"
 #include "webrtc/modules/video_capture/video_capture_defines.h"
 
-
-
 namespace mozilla {
 
 namespace ipc {
 class BackgroundChildImpl;
 class PrincipalInfo;
 }
 
 namespace camera {
--- a/dom/media/systemservices/CamerasParent.cpp
+++ b/dom/media/systemservices/CamerasParent.cpp
@@ -249,18 +249,18 @@ CamerasParent::DeliverFrameOverIPC(Captu
 }
 
 ShmemBuffer
 CamerasParent::GetBuffer(size_t aSize)
 {
   return mShmemPool.GetIfAvailable(aSize);
 }
 
-int32_t
-CallbackHelper::RenderFrame(uint32_t aStreamId, const webrtc::VideoFrame& aVideoFrame)
+void
+CallbackHelper::OnFrame(const webrtc::VideoFrame& aVideoFrame)
 {
   LOG_VERBOSE((__PRETTY_FUNCTION__));
   RefPtr<DeliverFrameRunnable> runnable = nullptr;
   // Get frame properties
   camera::VideoFrameProperties properties;
   VideoFrameUtils::InitFrameBufferProperties(aVideoFrame, properties);
   // Get a shared memory buffer to copy the frame data into
   ShmemBuffer shMemBuffer = mParent->GetBuffer(properties.bufferSize());
@@ -279,30 +279,16 @@ CallbackHelper::RenderFrame(uint32_t aSt
   if (!runnable.get()) {
     runnable = new DeliverFrameRunnable(mParent, mCapEngine, mStreamId,
                                         aVideoFrame, properties);
   }
   MOZ_ASSERT(mParent);
   nsIEventTarget* target = mParent->GetBackgroundEventTarget();
   MOZ_ASSERT(target != nullptr);
   target->Dispatch(runnable, NS_DISPATCH_NORMAL);
-  return 0;
-}
-
-void
-CallbackHelper::OnIncomingCapturedFrame(const int32_t id, const webrtc::VideoFrame& aVideoFrame)
-{
- LOG_VERBOSE((__PRETTY_FUNCTION__));
- RenderFrame(id,aVideoFrame);
-}
-
-void
-CallbackHelper::OnCaptureDelayChanged(const int32_t id, const int32_t delay)
-{
-  LOG((__PRETTY_FUNCTION__));
 }
 
 mozilla::ipc::IPCResult
 CamerasParent::RecvReleaseFrame(mozilla::ipc::Shmem&& s) {
   mShmemPool.Put(ShmemBuffer(s));
   return IPC_OK();
 }
 
@@ -713,21 +699,17 @@ CamerasParent::RecvAllocateCaptureDevice
       media::NewRunnableFrom([self, allowed, aCapEngine, unique_id]() -> nsresult {
         int numdev = -1;
         int error = -1;
         if (allowed && self->EnsureInitialized(aCapEngine)) {
           auto engine = self->mEngines[aCapEngine].get();
           engine->CreateVideoCapture(numdev, unique_id.get());
           engine->WithEntry(numdev, [&error](VideoEngine::CaptureEntry& cap) {
             if (cap.VideoCapture()) {
-              if (!cap.VideoRenderer()) {
-                LOG(("VideoEngine::VideoRenderer() failed"));
-              } else {
-                error = 0;
-              }
+              error = 0;
             }
           });
         }
         RefPtr<nsIRunnable> ipc_runnable =
           media::NewRunnableFrom([self, numdev, error]() -> nsresult {
             if (self->IsShuttingDown()) {
               return NS_ERROR_FAILURE;
             }
@@ -802,47 +784,40 @@ CamerasParent::RecvStartCapture(const Ca
 {
   LOG((__PRETTY_FUNCTION__));
 
   RefPtr<CamerasParent> self(this);
   RefPtr<Runnable> webrtc_runnable =
     media::NewRunnableFrom([self, aCapEngine, capnum, ipcCaps]() -> nsresult {
       LOG((__PRETTY_FUNCTION__));
       CallbackHelper** cbh;
-      webrtc::VideoRenderCallback* render;
       VideoEngine* engine = nullptr;
       int error = -1;
       if (self->EnsureInitialized(aCapEngine)) {
         cbh = self->mCallbacks.AppendElement(
           new CallbackHelper(static_cast<CaptureEngine>(aCapEngine), capnum, self));
-        render = static_cast<webrtc::VideoRenderCallback*>(*cbh);
 
         engine = self->mEngines[aCapEngine];
-        engine->WithEntry(capnum, [capnum, &render, &engine, &error, &ipcCaps, &cbh](VideoEngine::CaptureEntry& cap) {
-          cap.VideoRenderer()->AddIncomingRenderStream(capnum,0, 0., 0., 1., 1.);
-          error = cap.VideoRenderer()->AddExternalRenderCallback(capnum, render);
-          if (!error) {
-            error = cap.VideoRenderer()->StartRender(capnum);
-          }
-
+        engine->WithEntry(capnum, [capnum, &engine, &error, &ipcCaps, &cbh](VideoEngine::CaptureEntry& cap) {
+          error = 0;
           webrtc::VideoCaptureCapability capability;
           capability.width = ipcCaps.width();
           capability.height = ipcCaps.height();
           capability.maxFPS = ipcCaps.maxFPS();
           capability.expectedCaptureDelay = ipcCaps.expectedCaptureDelay();
           capability.rawType = static_cast<webrtc::RawVideoType>(ipcCaps.rawType());
           capability.codecType = static_cast<webrtc::VideoCodecType>(ipcCaps.codecType());
           capability.interlaced = ipcCaps.interlaced();
 
           if (!error) {
             error = cap.VideoCapture()->StartCapture(capability);
           }
           if (!error) {
             engine->Startup();
-            cap.VideoCapture()->RegisterCaptureDataCallback(*static_cast<webrtc::VideoCaptureDataCallback*>(*cbh));
+            cap.VideoCapture()->RegisterCaptureDataCallback(static_cast<rtc::VideoSinkInterface<webrtc::VideoFrame>*>(*cbh));
           }
         });
       }
       RefPtr<nsIRunnable> ipc_runnable =
         media::NewRunnableFrom([self, error]() -> nsresult {
           if (self->IsShuttingDown()) {
             return NS_ERROR_FAILURE;
           }
@@ -866,30 +841,26 @@ CamerasParent::StopCapture(const Capture
                            const int& capnum)
 {
   if (auto engine = EnsureInitialized(aCapEngine)) {
     engine->WithEntry(capnum,[capnum](VideoEngine::CaptureEntry& cap){
       if (cap.VideoCapture()) {
         cap.VideoCapture()->StopCapture();
         cap.VideoCapture()->DeRegisterCaptureDataCallback();
       }
-      if (cap.VideoRenderer()) {
-        cap.VideoRenderer()->StopRender(capnum);
-      }
     });
     // we're removing elements, iterate backwards
     for (size_t i = mCallbacks.Length(); i > 0; i--) {
       if (mCallbacks[i-1]->mCapEngine == aCapEngine
           && mCallbacks[i-1]->mStreamId == (uint32_t) capnum) {
         delete mCallbacks[i-1];
         mCallbacks.RemoveElementAt(i-1);
         break;
       }
     }
-    engine->RemoveRenderer(capnum);
     engine->Shutdown();
   }
 }
 
 mozilla::ipc::IPCResult
 CamerasParent::RecvStopCapture(const CaptureEngine& aCapEngine,
                                const int& capnum)
 {
--- a/dom/media/systemservices/CamerasParent.h
+++ b/dom/media/systemservices/CamerasParent.h
@@ -10,58 +10,48 @@
 #include "nsIObserver.h"
 #include "VideoEngine.h"
 #include "mozilla/dom/ContentParent.h"
 #include "mozilla/camera/PCamerasParent.h"
 #include "mozilla/ipc/Shmem.h"
 #include "mozilla/ShmemPool.h"
 #include "mozilla/Atomics.h"
 #include "webrtc/modules/video_capture/video_capture.h"
-#include "webrtc/modules/video_render/video_render_impl.h"
 #include "webrtc/modules/video_capture/video_capture_defines.h"
 #include "webrtc/common_video/include/incoming_video_stream.h"
+#include "webrtc/media/base/videosinkinterface.h"
 
 // conflicts with #include of scoped_ptr.h
 #undef FF
-#include "webrtc/common.h"
+#include "webrtc/common_types.h"
 
 #include "CamerasChild.h"
 
 #include "base/thread.h"
 
 namespace mozilla {
 
 namespace ipc {
 class PrincipalInfo;
 }
 
 namespace camera {
 
 class CamerasParent;
 
 class CallbackHelper :
-  public webrtc::VideoRenderCallback,
-  public webrtc::VideoCaptureDataCallback
+  public rtc::VideoSinkInterface<webrtc::VideoFrame>
 {
 public:
   CallbackHelper(CaptureEngine aCapEng, uint32_t aStreamId, CamerasParent *aParent)
     : mCapEngine(aCapEng), mStreamId(aStreamId), mParent(aParent) {};
 
-  // ViEExternalRenderer implementation. These callbacks end up
-  // running on the VideoCapture thread.
-  virtual int32_t RenderFrame(const uint32_t aStreamId, const webrtc::VideoFrame& video_frame) override;
-
+  // These callbacks end up running on the VideoCapture thread.
   // From  VideoCaptureCallback
-  virtual void OnIncomingCapturedFrame(const int32_t id, const webrtc::VideoFrame& videoFrame) override;
-  virtual void OnCaptureDelayChanged(const int32_t id, const int32_t delay) override;
-
-	// TODO(@@NG) This is now part of webrtc::VideoRenderer, not in the webrtc::VideoRenderCallback
-	// virtual bool IsTextureSupported() const override { return false; };
-	//
-	// virtual bool SmoothsRenderedFrames() const override { return false; }
+  virtual void OnFrame(const webrtc::VideoFrame& videoFrame) override;
 
   friend CamerasParent;
 
 private:
   CaptureEngine mCapEngine;
   uint32_t mStreamId;
   CamerasParent *mParent;
 };
deleted file mode 100644
--- a/dom/media/systemservices/LoadManager.cpp
+++ /dev/null
@@ -1,228 +0,0 @@
-/* -*- Mode: C++; tab-width: 50; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include "LoadManager.h"
-#include "LoadMonitor.h"
-#include "nsString.h"
-#include "mozilla/Logging.h"
-#include "mozilla/SizePrintfMacros.h"
-#include "prtime.h"
-#include "prinrval.h"
-#include "prsystem.h"
-
-#include "nsString.h"
-#include "nsThreadUtils.h"
-#include "nsReadableUtils.h"
-#include "nsIObserverService.h"
-#include "mozilla/Telemetry.h"
-#include "mozilla/ArrayUtils.h"
-
-// MOZ_LOG=LoadManager:5
-mozilla::LazyLogModule gLoadManagerLog("LoadManager");
-#undef LOG
-#undef LOG_ENABLED
-#define LOG(args) MOZ_LOG(gLoadManagerLog, mozilla::LogLevel::Debug, args)
-#define LOG_ENABLED() MOZ_LOG_TEST(gLoadManagerLog, mozilla::LogLevel::Verbose)
-
-namespace mozilla {
-
-/* static */ StaticRefPtr<LoadManagerSingleton> LoadManagerSingleton::sSingleton;
-
-NS_IMPL_ISUPPORTS(LoadManagerSingleton, nsIObserver)
-
-
-LoadManagerSingleton::LoadManagerSingleton(bool aEncoderOnly,
-                                           int aLoadMeasurementInterval,
-                                           int aAveragingMeasurements,
-                                           float aHighLoadThreshold,
-                                           float aLowLoadThreshold)
-  : mLock("LoadManager"),
-    mCurrentState(webrtc::kLoadNormal),
-    mOveruseActive(false),
-    mLoadSum(0.0f),
-    mLoadSumMeasurements(0),
-    mLoadMeasurementInterval(aLoadMeasurementInterval),
-    mAveragingMeasurements(aAveragingMeasurements),
-    mHighLoadThreshold(aHighLoadThreshold),
-    mLowLoadThreshold(aLowLoadThreshold)
-{
-  LOG(("LoadManager - Initializing (%dms x %d, %f, %f)",
-       mLoadMeasurementInterval, mAveragingMeasurements,
-       mHighLoadThreshold, mLowLoadThreshold));
-  MOZ_ASSERT(mHighLoadThreshold > mLowLoadThreshold);
-  if (!aEncoderOnly) {
-    mLoadMonitor = new LoadMonitor(mLoadMeasurementInterval);
-    mLoadMonitor->Init(mLoadMonitor);
-    mLoadMonitor->SetLoadChangeCallback(this);
-  }
-
-  mLastStateChange = TimeStamp::Now();
-  for (auto &in_state : mTimeInState) {
-    in_state = 0;
-  }
-}
-
-LoadManagerSingleton::~LoadManagerSingleton()
-{
-  LOG(("LoadManager: shutting down LoadMonitor"));
-  MOZ_ASSERT(!mLoadMonitor, "why wasn't the LoadMonitor shut down in xpcom-shutdown?");
-  if (mLoadMonitor) {
-    mLoadMonitor->Shutdown();
-  }
-}
-
-nsresult
-LoadManagerSingleton::Observe(nsISupports* aSubject, const char* aTopic,
-                     const char16_t* aData)
-{
-  NS_ASSERTION(NS_IsMainThread(), "Observer invoked off the main thread");
-  nsCOMPtr<nsIObserverService> obs = services::GetObserverService();
-
-  if (!strcmp(aTopic, "xpcom-shutdown")) {
-    obs->RemoveObserver(this, "xpcom-shutdown");
-    {
-      MutexAutoLock lock(mLock);
-      mObservers.Clear();
-    }
-    if (mLoadMonitor) {
-      mLoadMonitor->Shutdown();
-      mLoadMonitor = nullptr;
-    }
-
-    LOG(("Releasing LoadManager singleton and thread"));
-    // Note: won't be released immediately as the Observer has a ref to us
-    sSingleton = nullptr;
-  }
-  return NS_OK;
-}
-
-void
-LoadManagerSingleton::LoadChanged(float aSystemLoad, float aProcesLoad)
-{
-  MutexAutoLock lock(mLock);
-  // Update total load, and total amount of measured seconds.
-  mLoadSum += aSystemLoad;
-  mLoadSumMeasurements++;
-
-  if (mLoadSumMeasurements >= mAveragingMeasurements) {
-    double averagedLoad = mLoadSum / (float)mLoadSumMeasurements;
-
-    webrtc::CPULoadState newState = mCurrentState;
-
-    if (mOveruseActive || averagedLoad > mHighLoadThreshold) {
-      LOG(("LoadManager - LoadStressed"));
-      newState = webrtc::kLoadStressed;
-    } else if (averagedLoad < mLowLoadThreshold) {
-      LOG(("LoadManager - LoadRelaxed"));
-      newState = webrtc::kLoadRelaxed;
-    } else {
-      LOG(("LoadManager - LoadNormal"));
-      newState = webrtc::kLoadNormal;
-    }
-
-    if (newState != mCurrentState) {
-      LoadHasChanged(newState);
-    }
-
-    mLoadSum = 0;
-    mLoadSumMeasurements = 0;
-  }
-}
-
-void
-LoadManagerSingleton::OveruseDetected()
-{
-  LOG(("LoadManager - Overuse Detected"));
-  MutexAutoLock lock(mLock);
-  mOveruseActive = true;
-  if (mCurrentState != webrtc::kLoadStressed) {
-    LoadHasChanged(webrtc::kLoadStressed);
-  }
-}
-
-void
-LoadManagerSingleton::NormalUsage()
-{
-  LOG(("LoadManager - Overuse finished"));
-  MutexAutoLock lock(mLock);
-  mOveruseActive = false;
-}
-
-void
-LoadManagerSingleton::LoadHasChanged(webrtc::CPULoadState aNewState)
-{
-  mLock.AssertCurrentThreadOwns();
-  LOG(("LoadManager - Signaling LoadHasChanged from %d to %d to %" PRIuSIZE " listeners",
-       mCurrentState, aNewState, mObservers.Length()));
-
-  // Record how long we spent in this state for later Telemetry or display
-  TimeStamp now = TimeStamp::Now();
-  mTimeInState[mCurrentState] += (now - mLastStateChange).ToMilliseconds();
-  mLastStateChange = now;
-
-  mCurrentState = aNewState;
-  for (size_t i = 0; i < mObservers.Length(); i++) {
-    mObservers.ElementAt(i)->onLoadStateChanged(mCurrentState);
-  }
-}
-
-void
-LoadManagerSingleton::AddObserver(webrtc::CPULoadStateObserver * aObserver)
-{
-  LOG(("LoadManager - Adding Observer"));
-  MutexAutoLock lock(mLock);
-  mObservers.AppendElement(aObserver);
-}
-
-void
-LoadManagerSingleton::RemoveObserver(webrtc::CPULoadStateObserver * aObserver)
-{
-  LOG(("LoadManager - Removing Observer"));
-  MutexAutoLock lock(mLock);
-  if (!mObservers.RemoveElement(aObserver)) {
-    LOG(("LoadManager - Element to remove not found"));
-  }
-  if (mObservers.Length() == 0) {
-    // Record how long we spent in the final state for later Telemetry or display
-    TimeStamp now = TimeStamp::Now();
-    mTimeInState[mCurrentState] += (now - mLastStateChange).ToMilliseconds();
-
-    float total = 0;
-    for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mTimeInState); i++) {
-      total += mTimeInState[i];
-    }
-    // Don't include short calls; we don't have reasonable load data, and
-    // such short calls rarely reach a stable state.  Keep relatively
-    // short calls separate from longer ones
-    bool log = total > 5*PR_MSEC_PER_SEC;
-    bool small = log && total < 30*PR_MSEC_PER_SEC;
-    if (log) {
-      // Note: We don't care about rounding here; thus total may be < 100
-      Telemetry::Accumulate(small ? Telemetry::WEBRTC_LOAD_STATE_RELAXED_SHORT :
-                            Telemetry::WEBRTC_LOAD_STATE_RELAXED,
-                            (uint32_t) (mTimeInState[webrtc::CPULoadState::kLoadRelaxed]/total * 100));
-      Telemetry::Accumulate(small ? Telemetry::WEBRTC_LOAD_STATE_NORMAL_SHORT :
-                            Telemetry::WEBRTC_LOAD_STATE_NORMAL,
-                            (uint32_t) (mTimeInState[webrtc::CPULoadState::kLoadNormal]/total * 100));
-      Telemetry::Accumulate(small ? Telemetry::WEBRTC_LOAD_STATE_STRESSED_SHORT :
-                            Telemetry::WEBRTC_LOAD_STATE_STRESSED,
-                            (uint32_t) (mTimeInState[webrtc::CPULoadState::kLoadStressed]/total * 100));
-    }
-    for (auto &in_state : mTimeInState) {
-      in_state = 0;
-    }
-
-    if (mLoadMonitor) {
-      // Dance to avoid deadlock on mLock!
-      RefPtr<LoadMonitor> loadMonitor = mLoadMonitor.forget();
-      MutexAutoUnlock unlock(mLock);
-
-      loadMonitor->Shutdown();
-    }
-  }
-}
-
-
-}
deleted file mode 100644
--- a/dom/media/systemservices/LoadManager.h
+++ /dev/null
@@ -1,112 +0,0 @@
-/* -*- Mode: C++; tab-width: 50; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#ifndef _LOADMANAGER_H_
-#define _LOADMANAGER_H_
-
-#include "LoadMonitor.h"
-#include "mozilla/StaticPtr.h"
-#include "mozilla/TimeStamp.h"
-#include "mozilla/Services.h"
-#include "nsTArray.h"
-#include "nsIObserver.h"
-
-#include "webrtc/common_types.h"
-#include "webrtc/call.h"
-#include "webrtc/video/overuse_frame_detector.h"
-extern mozilla::LazyLogModule gLoadManagerLog;
-
-namespace mozilla {
-
-class LoadManagerSingleton : public LoadNotificationCallback,
-                             public webrtc::CPULoadStateCallbackInvoker,
-                             public webrtc::CpuOveruseObserver,
-                             public nsIObserver
-
-{
-public:
-    static LoadManagerSingleton* Get();
-
-    NS_DECL_THREADSAFE_ISUPPORTS
-    NS_DECL_NSIOBSERVER
-
-    // LoadNotificationCallback interface
-    void LoadChanged(float aSystemLoad, float aProcessLoad) override;
-    // CpuOveruseObserver interface
-    // Called as soon as an overuse is detected.
-    void OveruseDetected() override;
-    // Called periodically when the system is not overused any longer.
-    void NormalUsage() override;
-    // CPULoadStateCallbackInvoker interface
-    void AddObserver(webrtc::CPULoadStateObserver * aObserver) override;
-    void RemoveObserver(webrtc::CPULoadStateObserver * aObserver) override;
-
-private:
-    LoadManagerSingleton(bool aEncoderOnly,
-                         int aLoadMeasurementInterval,
-                         int aAveragingMeasurements,
-                         float aHighLoadThreshold,
-                         float aLowLoadThreshold);
-    ~LoadManagerSingleton();
-
-    void LoadHasChanged(webrtc::CPULoadState aNewState);
-
-    RefPtr<LoadMonitor> mLoadMonitor;
-
-    // This protects access to the mObservers list, the current state, and
-    // pretty much all the other members (below).
-    Mutex mLock;
-    nsTArray<webrtc::CPULoadStateObserver*> mObservers;
-    webrtc::CPULoadState mCurrentState;
-    TimeStamp mLastStateChange;
-    float mTimeInState[static_cast<int>(webrtc::kLoadLast)];
-
-    // Set when overuse was signaled to us, and hasn't been un-signaled yet.
-    bool  mOveruseActive;
-    float mLoadSum;
-    int   mLoadSumMeasurements;
-    // Load measurement settings
-    int mLoadMeasurementInterval;
-    int mAveragingMeasurements;
-    float mHighLoadThreshold;
-    float mLowLoadThreshold;
-
-    static StaticRefPtr<LoadManagerSingleton> sSingleton;
-};
-
-class LoadManager final : public webrtc::CPULoadStateCallbackInvoker,
-                          public webrtc::LoadObserver
-{
-public:
-    explicit LoadManager(LoadManagerSingleton* aManager)
-        : mManager(aManager)
-    {}
-    ~LoadManager() {}
-
-    void AddObserver(webrtc::CPULoadStateObserver * aObserver) override
-    {
-        mManager->AddObserver(aObserver);
-    }
-    void RemoveObserver(webrtc::CPULoadStateObserver * aObserver) override
-    {
-        mManager->RemoveObserver(aObserver);
-    }
-
-    void OnLoadUpdate(webrtc::LoadObserver::Load load_state) override
-    {
-        if (load_state == webrtc::LoadObserver::kOveruse) {
-            mManager->OveruseDetected();
-        } else if (load_state == webrtc::LoadObserver::kUnderuse) {
-            mManager->NormalUsage();
-        }
-    }
-
-private:
-    RefPtr<LoadManagerSingleton> mManager;
-};
-
-} //namespace
-
-#endif /* _LOADMANAGER_H_ */
deleted file mode 100644
--- a/dom/media/systemservices/LoadManagerFactory.cpp
+++ /dev/null
@@ -1,52 +0,0 @@
-/* -*- Mode: C++; tab-width: 50; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include "LoadManager.h"
-#include "LoadManagerFactory.h"
-#include "MainThreadUtils.h"
-#include "nsIObserverService.h"
-
-#include "mozilla/Preferences.h"
-
-namespace mozilla {
-
-// Assume stored in an nsAutoPtr<>
-LoadManager *
-LoadManagerBuild(void)
-{
-  return new LoadManager(LoadManagerSingleton::Get());
-}
-
-/* static */  LoadManagerSingleton*
-LoadManagerSingleton::Get() {
-  if (!sSingleton) {
-    MOZ_ASSERT(NS_IsMainThread());
-
-    bool loadEncoderOnly =
-      mozilla::Preferences::GetBool("media.navigator.load_adapt.encoder_only", true);
-    int loadMeasurementInterval =
-      mozilla::Preferences::GetInt("media.navigator.load_adapt.measure_interval", 1000);
-    int averagingSeconds =
-      mozilla::Preferences::GetInt("media.navigator.load_adapt.avg_seconds", 3);
-    float highLoadThreshold =
-      mozilla::Preferences::GetFloat("media.navigator.load_adapt.high_load", 0.90f);
-    float lowLoadThreshold =
-      mozilla::Preferences::GetFloat("media.navigator.load_adapt.low_load", 0.40f);
-
-    sSingleton = new LoadManagerSingleton(loadEncoderOnly,
-                                          loadMeasurementInterval,
-                                          averagingSeconds,
-                                          highLoadThreshold,
-                                          lowLoadThreshold);
-
-    nsCOMPtr<nsIObserverService> obs = services::GetObserverService();
-    if (obs) {
-      obs->AddObserver(sSingleton, "xpcom-shutdown", false);
-    }
-  }
-  return sSingleton;
-}
-
-}; // namespace
deleted file mode 100644
--- a/dom/media/systemservices/LoadManagerFactory.h
+++ /dev/null
@@ -1,18 +0,0 @@
-/* -*- Mode: C++; tab-width: 50; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#ifndef _LOADMANAGERFACTORY_H_
-#define _LOADMANAGERFACTORY_H_
-
-namespace mozilla {
-
-class LoadManager;
-
-mozilla::LoadManager* LoadManagerBuild();
-void LoadManagerDestroy(mozilla::LoadManager* aLoadManager);
-
-} //namespace
-
-#endif /* _LOADMANAGERFACTORY_H_ */
deleted file mode 100644
--- a/dom/media/systemservices/LoadMonitor.cpp
+++ /dev/null
@@ -1,658 +0,0 @@
-/* -*- Mode: C++; tab-width: 50; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include "LoadMonitor.h"
-#include "LoadManager.h"
-#include "nsString.h"
-#include "mozilla/Logging.h"
-#include "prtime.h"
-#include "prinrval.h"
-#include "prsystem.h"
-#include "prprf.h"
-
-#include "nsString.h"
-#include "nsThreadUtils.h"
-#include "nsReadableUtils.h"
-#include "nsNetUtil.h"
-#include "nsIInputStream.h"
-#include "nsIFile.h"
-#include "nsILineInputStream.h"
-#include "nsIObserverService.h"
-#include "nsIServiceManager.h"
-
-#include "mozilla/TimeStamp.h"
-#include "mozilla/Services.h"
-
-#ifdef XP_UNIX
-#include <sys/time.h>
-#include <sys/resource.h>
-#include <unistd.h>
-#endif
-
-#ifdef XP_MACOSX
-#include <mach/mach_host.h>
-#include <mach/mach_init.h>
-#include <mach/host_info.h>
-#endif
-
-#if defined(__DragonFly__) || defined(__FreeBSD__) \
- || defined(__NetBSD__) || defined(__OpenBSD__)
-#include <sys/sysctl.h>
-# if defined(__OpenBSD__)
-#define KERN_CP_TIME KERN_CPTIME
-# endif
-#endif
-
-#if defined(__NetBSD__) || defined(__OpenBSD__)
-#include <sys/sched.h>
-#endif
-
-#ifdef XP_WIN
-#include <pdh.h>
-#include <tchar.h>
-#pragma comment(lib, "pdh.lib")
-#endif
-
-// MOZ_LOG=LoadManager:5
-#undef LOG
-#undef LOG_ENABLED
-#define LOG(args) MOZ_LOG(gLoadManagerLog, mozilla::LogLevel::Debug, args)
-#define LOG_ENABLED() MOZ_LOG_TEST(gLoadManagerLog, mozilla::LogLevel::Debug)
-#define LOG_MANY_ENABLED() MOZ_LOG_TEST(gLoadManagerLog, mozilla::LogLevel::Verbose)
-
-namespace mozilla {
-
-NS_IMPL_ISUPPORTS(LoadMonitor, nsIObserver)
-
-LoadMonitor::LoadMonitor(int aLoadUpdateInterval)
-  : mLoadUpdateInterval(aLoadUpdateInterval),
-    mLock("LoadMonitor.mLock"),
-    mCondVar(mLock, "LoadMonitor.mCondVar"),
-    mShutdownPending(false),
-    mLoadInfoThread(nullptr),
-    mSystemLoad(0.0f),
-    mProcessLoad(0.0f),
-    mLoadNotificationCallback(nullptr)
-{
-}
-
-LoadMonitor::~LoadMonitor()
-{
-  Shutdown();
-}
-
-NS_IMETHODIMP
-LoadMonitor::Observe(nsISupports* /* aSubject */,
-                     const char*  aTopic,
-                     const char16_t* /* aData */)
-{
-  MOZ_ASSERT(NS_IsMainThread(), "Wrong thread!");
-  MOZ_ASSERT(!strcmp("xpcom-shutdown-threads", aTopic), "Bad topic!");
-  Shutdown();
-  return NS_OK;
-}
-
-class LoadMonitorAddObserver : public Runnable
-{
-public:
-  explicit LoadMonitorAddObserver(RefPtr<LoadMonitor> loadMonitor)
-  {
-    mLoadMonitor = loadMonitor;
-  }
-
-  NS_IMETHOD Run() override
-  {
-    nsCOMPtr<nsIObserverService> observerService =
-        mozilla::services::GetObserverService();
-    if (!observerService)
-      return NS_ERROR_FAILURE;
-
-    nsresult rv = observerService->AddObserver(mLoadMonitor, "xpcom-shutdown-threads", false);
-    NS_ENSURE_SUCCESS(rv, rv);
-
-    return NS_OK;
-  }
-
-private:
-  RefPtr<LoadMonitor> mLoadMonitor;
-};
-
-class LoadMonitorRemoveObserver : public Runnable
-{
-public:
-  explicit LoadMonitorRemoveObserver(RefPtr<LoadMonitor> loadMonitor)
-  {
-    mLoadMonitor = loadMonitor;
-  }
-
-  NS_IMETHOD Run() override
-  {
-    // remove xpcom shutdown observer
-    nsCOMPtr<nsIObserverService> observerService =
-      mozilla::services::GetObserverService();
-
-    if (observerService)
-      observerService->RemoveObserver(mLoadMonitor, "xpcom-shutdown-threads");
-
-    return NS_OK;
-  }
-
-private:
-  RefPtr<LoadMonitor> mLoadMonitor;
-};
-
-void LoadMonitor::Shutdown()
-{
-  if (mLoadInfoThread) {
-    {
-      MutexAutoLock lock(mLock);
-      LOG(("LoadMonitor: shutting down"));
-      mShutdownPending = true;
-      mCondVar.Notify();
-    }
-
-    // Note: can't just call ->Shutdown() from here; that spins the event
-    // loop here, causing re-entrancy issues if we're invoked from cycle
-    // collection.  Argh.
-    mLoadInfoThread = nullptr;
-
-    RefPtr<LoadMonitorRemoveObserver> remObsRunner = new LoadMonitorRemoveObserver(this);
-    if (!NS_IsMainThread()) {
-      NS_DispatchToMainThread(remObsRunner);
-    } else {
-      remObsRunner->Run();
-    }
-  }
-}
-
-#ifdef XP_WIN
-static LPCTSTR TotalCounterPath = _T("\\Processor(_Total)\\% Processor Time");
-
-class WinProcMon
-{
-public:
-  WinProcMon():
-    mQuery(0), mCounter(0) {};
-  ~WinProcMon();
-  nsresult Init();
-  nsresult QuerySystemLoad(float* load_percent);
-  static const uint64_t TicksPerSec = 10000000; //100nsec tick (10MHz)
-private:
-  PDH_HQUERY mQuery;
-  PDH_HCOUNTER mCounter;
-};
-
-WinProcMon::~WinProcMon()
-{
-  if (mQuery != 0) {
-    PdhCloseQuery(mQuery);
-    mQuery = 0;
-  }
-}
-
-nsresult
-WinProcMon::Init()
-{
-  PDH_HQUERY query;
-  PDH_HCOUNTER counter;
-
-  // Get a query handle to the Performance Data Helper
-  PDH_STATUS status = PdhOpenQuery(
-                        NULL,      // No log file name: use real-time source
-                        0,         // zero out user data token: unsued
-                        &query);
-
-  if (status != ERROR_SUCCESS) {
-    LOG(("PdhOpenQuery error = %X", status));
-    return NS_ERROR_FAILURE;
-  }
-
-  // Add a pre-defined high performance counter to the query.
-  // This one is for the total CPU usage.
-  status = PdhAddCounter(query, TotalCounterPath, 0, &counter);
-
-  if (status != ERROR_SUCCESS) {
-    PdhCloseQuery(query);
-    LOG(("PdhAddCounter (_Total) error = %X", status));
-    return NS_ERROR_FAILURE;
-  }
-
-  // Need to make an initial query call to set up data capture.
-  status = PdhCollectQueryData(query);
-
-  if (status != ERROR_SUCCESS) {
-    PdhCloseQuery(query);
-    LOG(("PdhCollectQueryData (init) error = %X", status));
-    return NS_ERROR_FAILURE;
-  }
-
-  mQuery = query;
-  mCounter = counter;
-  return NS_OK;
-}
-
-nsresult WinProcMon::QuerySystemLoad(float* load_percent)
-{
-  *load_percent = 0;
-
-  if (mQuery == 0) {
-    return NS_ERROR_FAILURE;
-  }
-
-  // Update all counters associated with this query object.
-  PDH_STATUS status = PdhCollectQueryData(mQuery);
-
-  if (status != ERROR_SUCCESS) {
-    LOG(("PdhCollectQueryData error = %X", status));
-    return NS_ERROR_FAILURE;
-  }
-
-  PDH_FMT_COUNTERVALUE counter;
-  // maximum is 100% regardless of CPU core count.
-  status = PdhGetFormattedCounterValue(
-               mCounter,
-               PDH_FMT_DOUBLE,
-               (LPDWORD)NULL,
-               &counter);
-
-  if (ERROR_SUCCESS != status ||
-      // There are multiple success return values.
-      !IsSuccessSeverity(counter.CStatus)) {
-    LOG(("PdhGetFormattedCounterValue error"));
-    return NS_ERROR_FAILURE;
-  }
-
-  // The result is a percent value, reduce to match expected scale.
-  *load_percent = (float)(counter.doubleValue / 100.0f);
-  return NS_OK;
-}
-#endif
-
-// Use a non-generic class name, because otherwise we can get name collisions
-// with other classes in the codebase.  The normal way of dealing with that is
-// to put the class in an anonymous namespace, but this class is used as a
-// member of RTCLoadInfo, which can't be in the anonymous namespace, so it also
-// can't be in an anonymous namespace: gcc warns about that setup and this
-// directory is fail-on-warnings.
-class RTCLoadStats
-{
-public:
-  RTCLoadStats() :
-    mPrevTotalTimes(0),
-    mPrevCpuTimes(0),
-    mPrevLoad(0) {};
-
-  double GetLoad() { return (double)mPrevLoad; };
-
-  uint64_t mPrevTotalTimes;
-  uint64_t mPrevCpuTimes;
-  float mPrevLoad;               // Previous load value.
-};
-
-// Use a non-generic class name, because otherwise we can get name collisions
-// with other classes in the codebase.  The normal way of dealing with that is
-// to put the class in an anonymous namespace, but this class is used as a
-// member of LoadInfoCollectRunner, which can't be in the anonymous namespace,
-// so it also can't be in an anonymous namespace: gcc warns about that setup
-// and this directory is fail-on-warnings.
-class RTCLoadInfo final
-{
-private:
-  ~RTCLoadInfo() {}
-
-public:
-  NS_INLINE_DECL_REFCOUNTING(RTCLoadInfo)
-
-  RTCLoadInfo(): mLoadUpdateInterval(0) {};
-  nsresult Init(int aLoadUpdateInterval);
-  double GetSystemLoad() { return mSystemLoad.GetLoad(); };
-  double GetProcessLoad() { return mProcessLoad.GetLoad(); };
-  nsresult UpdateSystemLoad();
-  nsresult UpdateProcessLoad();
-
-private:
-  void UpdateCpuLoad(uint64_t ticks_per_interval,
-                     uint64_t current_total_times,
-                     uint64_t current_cpu_times,
-                     RTCLoadStats* loadStat);
-#ifdef XP_WIN
-  WinProcMon mSysMon;
-  HANDLE mProcHandle;
-  int mNumProcessors;
-#endif
-  RTCLoadStats mSystemLoad;
-  RTCLoadStats mProcessLoad;
-  uint64_t mTicksPerInterval;
-  int mLoadUpdateInterval;
-};
-
-nsresult RTCLoadInfo::Init(int aLoadUpdateInterval)
-{
-  mLoadUpdateInterval = aLoadUpdateInterval;
-#ifdef XP_WIN
-  mTicksPerInterval = (WinProcMon::TicksPerSec /*Hz*/
-                       * mLoadUpdateInterval /*msec*/) / 1000 ;
-  mNumProcessors = PR_GetNumberOfProcessors();
-  mProcHandle = GetCurrentProcess();
-  return mSysMon.Init();
-#else
-  mTicksPerInterval = (sysconf(_SC_CLK_TCK) * mLoadUpdateInterval) / 1000;
-  return NS_OK;
-#endif
-}
-
-void RTCLoadInfo::UpdateCpuLoad(uint64_t ticks_per_interval,
-                                uint64_t current_total_times,
-                                uint64_t current_cpu_times,
-                                RTCLoadStats *loadStat) {
-  // Check if we get an inconsistent number of ticks.
-  if (((current_total_times - loadStat->mPrevTotalTimes)
-       > (ticks_per_interval * 10))
-      || current_total_times < loadStat->mPrevTotalTimes
-      || current_cpu_times < loadStat->mPrevCpuTimes) {
-    // Bug at least on the Nexus 4 and Galaxy S4
-    // https://code.google.com/p/android/issues/detail?id=41630
-    // We do need to update our previous times, or we can get stuck
-    // when there is a blip upwards and then we get a bunch of consecutive
-    // lower times. Just skip the load calculation.
-    LOG(("Inconsistent time values are passed. ignored"));
-    // Try to recover next tick
-    loadStat->mPrevTotalTimes = current_total_times;
-    loadStat->mPrevCpuTimes = current_cpu_times;
-    return;
-  }
-
-  const uint64_t cpu_diff = current_cpu_times - loadStat->mPrevCpuTimes;
-  const uint64_t total_diff = current_total_times - loadStat->mPrevTotalTimes;
-  if (total_diff > 0) {
-#ifdef XP_WIN
-    float result =  (float)cpu_diff / (float)total_diff/ (float)mNumProcessors;
-#else
-    float result =  (float)cpu_diff / (float)total_diff;
-#endif
-    loadStat->mPrevLoad = result;
-  }
-  loadStat->mPrevTotalTimes = current_total_times;
-  loadStat->mPrevCpuTimes = current_cpu_times;
-}
-
-nsresult RTCLoadInfo::UpdateSystemLoad()
-{
-#if defined(LINUX) || defined(ANDROID)
-  nsCOMPtr<nsIFile> procStatFile = do_CreateInstance(NS_LOCAL_FILE_CONTRACTID);
-  procStatFile->InitWithPath(NS_LITERAL_STRING("/proc/stat"));
-
-  nsCOMPtr<nsIInputStream> fileInputStream;
-  nsresult rv = NS_NewLocalFileInputStream(getter_AddRefs(fileInputStream),
-                                           procStatFile);
-  NS_ENSURE_SUCCESS(rv, rv);
-
-  nsCOMPtr<nsILineInputStream> lineInputStream = do_QueryInterface(fileInputStream, &rv);
-  NS_ENSURE_SUCCESS(rv, rv);
-
-  nsAutoCString buffer;
-  bool isMore = true;
-  lineInputStream->ReadLine(buffer, &isMore);
-
-  uint64_t user;
-  uint64_t nice;
-  uint64_t system;
-  uint64_t idle;
-  if (PR_sscanf(buffer.get(), "cpu %llu %llu %llu %llu",
-                &user, &nice,
-                &system, &idle) != 4) {
-    LOG(("Error parsing /proc/stat"));
-    return NS_ERROR_FAILURE;
-  }
-
-  const uint64_t cpu_times = nice + system + user;
-  const uint64_t total_times = cpu_times + idle;
-
-  UpdateCpuLoad(mTicksPerInterval,
-                total_times,
-                cpu_times,
-                &mSystemLoad);
-  return NS_OK;
-#elif defined(XP_MACOSX)
-  mach_msg_type_number_t info_cnt = HOST_CPU_LOAD_INFO_COUNT;
-  host_cpu_load_info_data_t load_info;
-  kern_return_t rv = host_statistics(mach_host_self(), HOST_CPU_LOAD_INFO,
-                                     (host_info_t)(&load_info), &info_cnt);
-
-  if (rv != KERN_SUCCESS || info_cnt != HOST_CPU_LOAD_INFO_COUNT) {
-    LOG(("Error from mach/host_statistics call"));
-    return NS_ERROR_FAILURE;
-  }
-
-  const uint64_t cpu_times = load_info.cpu_ticks[CPU_STATE_NICE]
-                           + load_info.cpu_ticks[CPU_STATE_SYSTEM]
-                           + load_info.cpu_ticks[CPU_STATE_USER];
-  const uint64_t total_times = cpu_times + load_info.cpu_ticks[CPU_STATE_IDLE];
-
-  UpdateCpuLoad(mTicksPerInterval,
-                total_times,
-                cpu_times,
-                &mSystemLoad);
-  return NS_OK;
-#elif defined(__DragonFly__) || defined(__FreeBSD__) \
-   || defined(__NetBSD__) || defined(__OpenBSD__)
-#if defined(__NetBSD__)
-  uint64_t cp_time[CPUSTATES];
-#else
-  long cp_time[CPUSTATES];
-#endif // __NetBSD__
-  size_t sz = sizeof(cp_time);
-#ifdef KERN_CP_TIME
-  int mib[] = {
-    CTL_KERN,
-    KERN_CP_TIME,
-  };
-  u_int miblen = sizeof(mib) / sizeof(mib[0]);
-  if (sysctl(mib, miblen, &cp_time, &sz, nullptr, 0)) {
-#else
-  if (sysctlbyname("kern.cp_time", &cp_time, &sz, nullptr, 0)) {
-#endif // KERN_CP_TIME
-    LOG(("sysctl kern.cp_time failed"));
-    return NS_ERROR_FAILURE;
-  }
-
-  const uint64_t cpu_times = cp_time[CP_NICE]
-                           + cp_time[CP_SYS]
-                           + cp_time[CP_INTR]
-                           + cp_time[CP_USER];
-  const uint64_t total_times = cpu_times + cp_time[CP_IDLE];
-
-  UpdateCpuLoad(mTicksPerInterval,
-                total_times,
-                cpu_times,
-                &mSystemLoad);
-  return NS_OK;
-#elif defined(XP_WIN)
-  float load;
-  nsresult rv = mSysMon.QuerySystemLoad(&load);
-
-  if (rv == NS_OK) {
-    mSystemLoad.mPrevLoad = load;
-  }
-
-  return rv;
-#else
-  // Not implemented
-  return NS_OK;
-#endif
-}
-
-nsresult RTCLoadInfo::UpdateProcessLoad() {
-#if defined(XP_UNIX)
-  struct timeval tv;
-  gettimeofday(&tv, nullptr);
-  const uint64_t total_times = tv.tv_sec * PR_USEC_PER_SEC + tv.tv_usec;
-
-  rusage usage;
-  if (getrusage(RUSAGE_SELF, &usage) < 0) {
-    LOG(("getrusage failed"));
-    return NS_ERROR_FAILURE;
-  }
-
-  const uint64_t cpu_times =
-      (usage.ru_utime.tv_sec + usage.ru_stime.tv_sec) * PR_USEC_PER_SEC +
-       usage.ru_utime.tv_usec + usage.ru_stime.tv_usec;
-
-  UpdateCpuLoad(PR_USEC_PER_MSEC * mLoadUpdateInterval,
-                total_times,
-                cpu_times,
-                &mProcessLoad);
-#elif defined(XP_WIN)
-  FILETIME clk_time, sys_time, user_time;
-  uint64_t total_times, cpu_times;
-
-  GetSystemTimeAsFileTime(&clk_time);
-  total_times = (((uint64_t)clk_time.dwHighDateTime) << 32)
-                + (uint64_t)clk_time.dwLowDateTime;
-  BOOL ok = GetProcessTimes(mProcHandle, &clk_time, &clk_time, &sys_time, &user_time);
-
-  if (ok == 0) {
-    return NS_ERROR_FAILURE;
-  }
-
-  cpu_times = (((uint64_t)sys_time.dwHighDateTime
-                + (uint64_t)user_time.dwHighDateTime) << 32)
-              + (uint64_t)sys_time.dwLowDateTime
-              + (uint64_t)user_time.dwLowDateTime;
-
-  UpdateCpuLoad(mTicksPerInterval,
-                total_times,
-                cpu_times,
-                &mProcessLoad);
-#endif
-  return NS_OK;
-}
-
-// Note: This class can't be in the anonymous namespace, because then we can't
-// declare it as a friend of LoadMonitor.
-class LoadInfoCollectRunner : public Runnable
-{
-public:
-  LoadInfoCollectRunner(RefPtr<LoadMonitor> loadMonitor,
-                        RefPtr<RTCLoadInfo> loadInfo,
-                        nsIThread *loadInfoThread)
-    : mThread(loadInfoThread),
-      mLoadUpdateInterval(loadMonitor->mLoadUpdateInterval),
-      mLoadNoiseCounter(0)
-  {
-    mLoadMonitor = loadMonitor;
-    mLoadInfo = loadInfo;
-  }
-
-  NS_IMETHOD Run() override
-  {
-    if (NS_IsMainThread()) {
-      if (mThread) {
-        // Don't leak threads!
-        mThread->Shutdown(); // can't Shutdown from the thread itself, darn
-        // Don't null out mThread!
-        // See bug 999104.  We must hold a ref to the thread across Dispatch()
-        // since the internal mThread ref could be released while processing
-        // the Dispatch(), and Dispatch/PutEvent itself doesn't hold a ref; it
-        // assumes the caller does.
-      }
-      return NS_OK;
-    }
-
-    MutexAutoLock lock(mLoadMonitor->mLock);
-    while (!mLoadMonitor->mShutdownPending) {
-      mLoadInfo->UpdateSystemLoad();
-      mLoadInfo->UpdateProcessLoad();
-      float sysLoad = mLoadInfo->GetSystemLoad();
-      float procLoad = mLoadInfo->GetProcessLoad();
-
-      if ((++mLoadNoiseCounter % (LOG_MANY_ENABLED() ? 1 : 10)) == 0) {
-        LOG(("System Load: %f Process Load: %f", sysLoad, procLoad));
-        mLoadNoiseCounter = 0;
-      }
-      mLoadMonitor->SetSystemLoad(sysLoad);
-      mLoadMonitor->SetProcessLoad(procLoad);
-      mLoadMonitor->FireCallbacks();
-
-      mLoadMonitor->mCondVar.Wait(PR_MillisecondsToInterval(mLoadUpdateInterval));
-    }
-    // ok, we need to exit safely and can't shut ourselves down (DARN)
-    NS_DispatchToMainThread(this);
-    return NS_OK;
-  }
-
-private:
-  nsCOMPtr<nsIThread> mThread;
-  RefPtr<RTCLoadInfo> mLoadInfo;
-  RefPtr<LoadMonitor> mLoadMonitor;
-  int mLoadUpdateInterval;
-  int mLoadNoiseCounter;
-};
-
-void
-LoadMonitor::SetProcessLoad(float load) {
-  mLock.AssertCurrentThreadOwns();
-  mProcessLoad = load;
-}
-
-void
-LoadMonitor::SetSystemLoad(float load) {
-  mLock.AssertCurrentThreadOwns();
-  mSystemLoad = load;
-}
-
-float
-LoadMonitor::GetProcessLoad() {
-  MutexAutoLock lock(mLock);
-  float load = mProcessLoad;
-  return load;
-}
-
-void
-LoadMonitor::FireCallbacks() {
-  if (mLoadNotificationCallback) {
-    mLoadNotificationCallback->LoadChanged(mSystemLoad, mProcessLoad);
-  }
-}
-
-float
-LoadMonitor::GetSystemLoad() {
-  MutexAutoLock lock(mLock);
-  float load = mSystemLoad;
-  return load;
-}
-
-nsresult
-LoadMonitor::Init(RefPtr<LoadMonitor> &self)
-{
-  LOG(("Initializing LoadMonitor"));
-
-  RefPtr<RTCLoadInfo> load_info = new RTCLoadInfo();
-  nsresult rv = load_info->Init(mLoadUpdateInterval);
-
-  if (NS_FAILED(rv)) {
-    LOG(("RTCLoadInfo::Init error"));
-    return rv;
-  }
-
-  RefPtr<LoadMonitorAddObserver> addObsRunner = new LoadMonitorAddObserver(self);
-  NS_DispatchToMainThread(addObsRunner);
-
-  NS_NewNamedThread("Sys Load Info", getter_AddRefs(mLoadInfoThread));
-
-  RefPtr<LoadInfoCollectRunner> runner =
-    new LoadInfoCollectRunner(self, load_info, mLoadInfoThread);
-  mLoadInfoThread->Dispatch(runner, NS_DISPATCH_NORMAL);
-
-  return NS_OK;
-}
-
-void
-LoadMonitor::SetLoadChangeCallback(LoadNotificationCallback* aCallback)
-{
-  mLoadNotificationCallback = aCallback;
-}
-
-}
deleted file mode 100644
--- a/dom/media/systemservices/LoadMonitor.h
+++ /dev/null
@@ -1,61 +0,0 @@
-/* -*- Mode: C++; tab-width: 50; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#ifndef _LOADMONITOR_H_
-#define _LOADMONITOR_H_
-
-#include "mozilla/Mutex.h"
-#include "mozilla/CondVar.h"
-#include "mozilla/RefPtr.h"
-#include "mozilla/Atomics.h"
-#include "nsCOMPtr.h"
-#include "nsIThread.h"
-#include "nsIObserver.h"
-
-namespace mozilla {
-class LoadInfoCollectRunner;
-
-class LoadNotificationCallback
-{
-public:
-    virtual void LoadChanged(float aSystemLoad, float aProcessLoad) = 0;
-};
-
-class LoadMonitor final : public nsIObserver
-{
-public:
-    NS_DECL_THREADSAFE_ISUPPORTS
-    NS_DECL_NSIOBSERVER
-
-    explicit LoadMonitor(int aLoadUpdateInterval);
-
-    nsresult Init(RefPtr<LoadMonitor> &self);
-    void SetLoadChangeCallback(LoadNotificationCallback* aCallback);
-    void Shutdown();
-    float GetSystemLoad();
-    float GetProcessLoad();
-
-    friend class LoadInfoCollectRunner;
-
-private:
-    ~LoadMonitor();
-
-    void SetProcessLoad(float load);
-    void SetSystemLoad(float load);
-    void FireCallbacks();
-
-    int                  mLoadUpdateInterval;
-    mozilla::Mutex       mLock;
-    mozilla::CondVar     mCondVar;
-    bool                 mShutdownPending;
-    nsCOMPtr<nsIThread>  mLoadInfoThread;
-    float                mSystemLoad;
-    float                mProcessLoad;
-    LoadNotificationCallback* mLoadNotificationCallback;
-};
-
-} //namespace
-
-#endif /* _LOADMONITOR_H_ */
--- a/dom/media/systemservices/VideoEngine.cpp
+++ b/dom/media/systemservices/VideoEngine.cpp
@@ -3,19 +3,16 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "VideoEngine.h"
 #include "webrtc/video_engine/browser_capture_impl.h"
 #ifdef WEBRTC_ANDROID
 #include "webrtc/modules/video_capture/video_capture.h"
-#ifdef WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
-#include "webrtc/modules/video_render/video_render.h"
-#endif
 #endif
 
 
 namespace mozilla {
 namespace camera {
 
 #undef LOG
 #undef LOG_ENABLED
@@ -43,28 +40,26 @@ int VideoEngine::SetAndroidObjects(JavaV
 }
 #endif
 
 void
 VideoEngine::CreateVideoCapture(int32_t& id, const char* deviceUniqueIdUTF8) {
   LOG((__PRETTY_FUNCTION__));
   id = GenerateId();
   LOG(("CaptureDeviceInfo.type=%s id=%d",mCaptureDevInfo.TypeName(),id));
-  CaptureEntry entry = {-1,nullptr,nullptr};
+  CaptureEntry entry = {-1, nullptr};
 
   if (mCaptureDevInfo.type == webrtc::CaptureDeviceType::Camera) {
     entry = CaptureEntry(id,
-		         webrtc::VideoCaptureFactory::Create(id, deviceUniqueIdUTF8),
-                         nullptr);
+		         webrtc::VideoCaptureFactory::Create(deviceUniqueIdUTF8));
   } else {
 #ifndef WEBRTC_ANDROID
     entry = CaptureEntry(
 	      id,
-	      webrtc::DesktopCaptureImpl::Create(id, deviceUniqueIdUTF8, mCaptureDevInfo.type),
-              nullptr);
+	      webrtc::DesktopCaptureImpl::Create(id, deviceUniqueIdUTF8, mCaptureDevInfo.type));
 #else
     MOZ_ASSERT("CreateVideoCapture NO DESKTOP CAPTURE IMPL ON ANDROID" == nullptr);
 #endif
   }
   mCaps.emplace(id,std::move(entry));
 }
 
 int
@@ -79,17 +74,17 @@ VideoEngine::ReleaseVideoCapture(const i
 
 std::shared_ptr<webrtc::VideoCaptureModule::DeviceInfo>
 VideoEngine::GetOrCreateVideoCaptureDeviceInfo() {
   if (mDeviceInfo) {
     return mDeviceInfo;
   }
   switch (mCaptureDevInfo.type) {
     case webrtc::CaptureDeviceType::Camera: {
-      mDeviceInfo.reset(webrtc::VideoCaptureFactory::CreateDeviceInfo(0));
+      mDeviceInfo.reset(webrtc::VideoCaptureFactory::CreateDeviceInfo());
       break;
     }
     case webrtc::CaptureDeviceType::Browser: {
       mDeviceInfo.reset(webrtc::BrowserDeviceInfoImpl::CreateDeviceInfo());
       break;
     }
     // Window, Application, and Screen types are handled by DesktopCapture
     case webrtc::CaptureDeviceType::Window:
@@ -102,60 +97,40 @@ VideoEngine::GetOrCreateVideoCaptureDevi
       mDeviceInfo.reset();
 #endif
       break;
     }
   }
   return mDeviceInfo;
 }
 
-void
-VideoEngine::RemoveRenderer(int capnum) {
-  WithEntry(capnum, [](CaptureEntry& cap) {
-    cap.mVideoRender = nullptr;
-  });
-}
-
 const UniquePtr<const webrtc::Config>&
 VideoEngine::GetConfiguration() {
   return mConfig;
 }
 
 RefPtr<VideoEngine> VideoEngine::Create(UniquePtr<const webrtc::Config>&& aConfig) {
   LOG((__PRETTY_FUNCTION__));
   LOG(("Creating new VideoEngine with CaptureDeviceType %s",
        aConfig->Get<webrtc::CaptureDeviceInfo>().TypeName()));
   RefPtr<VideoEngine> engine(new VideoEngine(std::move(aConfig)));
   return engine;
 }
 
 VideoEngine::CaptureEntry::CaptureEntry(int32_t aCapnum,
-                                        rtc::scoped_refptr<webrtc::VideoCaptureModule> aCapture,
-                                        webrtc::VideoRender * aRenderer):
-    mCapnum(aCapnum),
-    mVideoCaptureModule(aCapture),
-    mVideoRender(aRenderer)
+                                        rtc::scoped_refptr<webrtc::VideoCaptureModule> aCapture)
+  : mCapnum(aCapnum)
+  , mVideoCaptureModule(aCapture)
 {}
 
 rtc::scoped_refptr<webrtc::VideoCaptureModule>
 VideoEngine::CaptureEntry::VideoCapture() {
   return mVideoCaptureModule;
 }
 
-const UniquePtr<webrtc::VideoRender>&
-VideoEngine::CaptureEntry::VideoRenderer() {
-  if (!mVideoRender) {
-     MOZ_ASSERT(mCapnum != -1);
-     // Create a VideoRender on demand
-     mVideoRender = UniquePtr<webrtc::VideoRender>(
-         webrtc::VideoRender::CreateVideoRender(mCapnum,nullptr,false,webrtc::kRenderExternal));
-   }
-  return mVideoRender;
-}
-
 int32_t
 VideoEngine::CaptureEntry::Capnum() const {
   return mCapnum;
 }
 
 bool VideoEngine::WithEntry(const int32_t entryCapnum,
 			    const std::function<void(CaptureEntry &entry)>&& fn) {
   auto it = mCaps.find(entryCapnum);
--- a/dom/media/systemservices/VideoEngine.h
+++ b/dom/media/systemservices/VideoEngine.h
@@ -5,19 +5,17 @@
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef mozilla_VideoEngine_h
 #define mozilla_VideoEngine_h
 
 #include "MediaEngine.h"
 #include "VideoFrameUtils.h"
 #include "mozilla/media/MediaUtils.h"
-#include "webrtc/common.h"
 #include "webrtc/modules/video_capture/video_capture_impl.h"
-#include "webrtc/modules/video_render/video_render.h"
 #include "webrtc/modules/video_capture/video_capture_defines.h"
 #include "webrtc/modules/video_capture/video_capture_factory.h"
 #include "webrtc/video_engine/desktop_capture_impl.h"
 #include <memory>
 #include <functional>
 
 namespace mozilla {
 namespace camera {
@@ -48,18 +46,16 @@ public:
   *   It is cached to prevent repeated lengthy polling for "realness"
   *   of the hardware devices.  This could be handled in a more elegant
   *   way in the future.
   *   @return on failure the shared_ptr will be null, otherwise it will contain a DeviceInfo.
   *   @see bug 1305212 https://bugzilla.mozilla.org/show_bug.cgi?id=1305212
   */
   std::shared_ptr<webrtc::VideoCaptureModule::DeviceInfo> GetOrCreateVideoCaptureDeviceInfo();
 
-  void RemoveRenderer(int capnum);
-
   const UniquePtr<const webrtc::Config>& GetConfiguration();
 
   void Startup() {
     mIsRunning = true;
   }
 
   void Shutdown() {
     mIsRunning = false;
@@ -67,25 +63,22 @@ public:
 
   bool IsRunning() const {
     return mIsRunning;
   }
 
   class CaptureEntry {
   public:
     CaptureEntry(int32_t aCapnum,
-                 rtc::scoped_refptr<webrtc::VideoCaptureModule> aCapture,
-                 webrtc::VideoRender* aRenderer);
+                 rtc::scoped_refptr<webrtc::VideoCaptureModule> aCapture);
     int32_t Capnum() const;
     rtc::scoped_refptr<webrtc::VideoCaptureModule> VideoCapture();
-    const UniquePtr<webrtc::VideoRender> & VideoRenderer();
   private:
     int32_t mCapnum;
     rtc::scoped_refptr<webrtc::VideoCaptureModule> mVideoCaptureModule;
-    UniquePtr<webrtc::VideoRender> mVideoRender;
     friend class VideoEngine;
   };
 
   // Returns true iff an entry for capnum exists
   bool WithEntry(const int32_t entryCapnum, const std::function<void(CaptureEntry &entry)>&& fn);
 
 private:
   explicit VideoEngine(UniquePtr<const webrtc::Config>&& aConfig);
--- a/dom/media/systemservices/VideoFrameUtils.cpp
+++ b/dom/media/systemservices/VideoFrameUtils.cpp
@@ -9,83 +9,83 @@
 #include "mozilla/ShmemPool.h"
 
 namespace mozilla {
 
 size_t
 VideoFrameUtils::TotalRequiredBufferSize(
                   const webrtc::VideoFrame& aVideoFrame)
 {
-  static const webrtc::PlaneType kPlanes[] =
-                  {webrtc::kYPlane, webrtc::kUPlane, webrtc::kVPlane};
-  if (aVideoFrame.IsZeroSize()) {
-    return 0;
-  }
-
-  size_t sum = 0;
-  for (auto plane : kPlanes) {
-    sum += aVideoFrame.allocated_size(plane);
-  }
-  return sum;
+  auto height = aVideoFrame.video_frame_buffer()->height();
+  return height * aVideoFrame.video_frame_buffer()->StrideY() +
+    ((height+1)/2) * aVideoFrame.video_frame_buffer()->StrideU() +
+    ((height+1)/2) * aVideoFrame.video_frame_buffer()->StrideV();
 }
 
 void VideoFrameUtils::InitFrameBufferProperties(
                   const webrtc::VideoFrame& aVideoFrame,
                   camera::VideoFrameProperties& aDestProps)
 {
   // The VideoFrameBuffer image data stored in the accompanying buffer
   // the buffer is at least this size of larger.
   aDestProps.bufferSize() = TotalRequiredBufferSize(aVideoFrame);
 
   aDestProps.timeStamp() = aVideoFrame.timestamp();
   aDestProps.ntpTimeMs() = aVideoFrame.ntp_time_ms();
   aDestProps.renderTimeMs() = aVideoFrame.render_time_ms();
 
   aDestProps.rotation() = aVideoFrame.rotation();
 
-  aDestProps.yAllocatedSize() = aVideoFrame.allocated_size(webrtc::kYPlane);
-  aDestProps.uAllocatedSize() = aVideoFrame.allocated_size(webrtc::kYPlane);
-  aDestProps.vAllocatedSize() = aVideoFrame.allocated_size(webrtc::kYPlane);
+  auto height = aVideoFrame.video_frame_buffer()->height();
+  aDestProps.yAllocatedSize() = height * aVideoFrame.video_frame_buffer()->StrideY();
+  aDestProps.uAllocatedSize() = ((height+1)/2) * aVideoFrame.video_frame_buffer()->StrideU();
+  aDestProps.vAllocatedSize() = ((height+1)/2) * aVideoFrame.video_frame_buffer()->StrideV();
 
-  aDestProps.width() = aVideoFrame.width();
-  aDestProps.height() = aVideoFrame.height();
+  aDestProps.width() = aVideoFrame.video_frame_buffer()->width();
+  aDestProps.height() = height;
 
-  aDestProps.yStride() = aVideoFrame.stride(webrtc::kYPlane);
-  aDestProps.uStride() = aVideoFrame.stride(webrtc::kUPlane);
-  aDestProps.vStride() = aVideoFrame.stride(webrtc::kVPlane);
+  aDestProps.yStride() = aVideoFrame.video_frame_buffer()->StrideY();
+  aDestProps.uStride() = aVideoFrame.video_frame_buffer()->StrideU();
+  aDestProps.vStride() = aVideoFrame.video_frame_buffer()->StrideV();
 }
 
 void VideoFrameUtils::CopyVideoFrameBuffers(uint8_t* aDestBuffer,
                        const size_t aDestBufferSize,
                        const webrtc::VideoFrame& aFrame)
 {
-  static const webrtc::PlaneType planes[] = {webrtc::kYPlane, webrtc::kUPlane, webrtc::kVPlane};
-
   size_t aggregateSize = TotalRequiredBufferSize(aFrame);
 
   MOZ_ASSERT(aDestBufferSize >= aggregateSize);
 
   // If planes are ordered YUV and contiguous then do a single copy
-  if ((aFrame.buffer(webrtc::kYPlane) != nullptr)
-    // Check that the three planes are ordered
-    && (aFrame.buffer(webrtc::kYPlane) < aFrame.buffer(webrtc::kUPlane))
-    && (aFrame.buffer(webrtc::kUPlane) < aFrame.buffer(webrtc::kVPlane))
-    //  Check that the last plane ends at firstPlane[totalsize]
-    && (&aFrame.buffer(webrtc::kYPlane)[aggregateSize] == &aFrame.buffer(webrtc::kVPlane)[aFrame.allocated_size(webrtc::kVPlane)]))
+  if ((aFrame.video_frame_buffer()->DataY() != nullptr)
+      // Check that the three planes are ordered
+      && (aFrame.video_frame_buffer()->DataY() < aFrame.video_frame_buffer()->DataU())
+      && (aFrame.video_frame_buffer()->DataU() < aFrame.video_frame_buffer()->DataV())
+      //  Check that the last plane ends at firstPlane[totalsize]
+      && (&aFrame.video_frame_buffer()->DataY()[aggregateSize] ==
+          &aFrame.video_frame_buffer()->DataV()[((aFrame.video_frame_buffer()->height()+1)/2) *
+                                                aFrame.video_frame_buffer()->StrideV()]))
   {
-    memcpy(aDestBuffer,aFrame.buffer(webrtc::kYPlane),aggregateSize);
+    memcpy(aDestBuffer, aFrame.video_frame_buffer()->DataY(), aggregateSize);
     return;
   }
 
   // Copy each plane
   size_t offset = 0;
-  for (auto plane: planes) {
-    memcpy(&aDestBuffer[offset], aFrame.buffer(plane), aFrame.allocated_size(plane));
-    offset += aFrame.allocated_size(plane);
-  }
+  size_t size;
+  auto height = aFrame.video_frame_buffer()->height();
+  size = height * aFrame.video_frame_buffer()->StrideY();
+  memcpy(&aDestBuffer[offset], aFrame.video_frame_buffer()->DataY(), size);
+  offset += size;
+  size = ((height+1)/2) * aFrame.video_frame_buffer()->StrideU();
+  memcpy(&aDestBuffer[offset], aFrame.video_frame_buffer()->DataU(), size);
+  offset += size;
+  size = ((height+1)/2) * aFrame.video_frame_buffer()->StrideV();
+  memcpy(&aDestBuffer[offset], aFrame.video_frame_buffer()->DataV(), size);
 }
 
 void VideoFrameUtils::CopyVideoFrameBuffers(ShmemBuffer& aDestShmem,
                         const webrtc::VideoFrame& aVideoFrame)
 {
   CopyVideoFrameBuffers(aDestShmem.Get().get<uint8_t>(), aDestShmem.Get().Size<uint8_t>(), aVideoFrame);
 }
 
--- a/dom/media/systemservices/moz.build
+++ b/dom/media/systemservices/moz.build
@@ -3,40 +3,36 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 if CONFIG['MOZ_WEBRTC']:
     EXPORTS += [
         'CamerasChild.h',
         'CamerasParent.h',
-        'LoadManager.h',
-        'LoadManagerFactory.h',
-        'LoadMonitor.h',
         'VideoEngine.h',
         'VideoFrameUtils.h'
     ]
     UNIFIED_SOURCES += [
         'CamerasChild.cpp',
         'CamerasParent.cpp',
-        'LoadManager.cpp',
-        'LoadManagerFactory.cpp',
-        'LoadMonitor.cpp',
         'ShmemPool.cpp',
         'VideoEngine.cpp',
         'VideoFrameUtils.cpp'
     ]
     LOCAL_INCLUDES += [
         '/media/webrtc/signaling',
         '/media/webrtc/trunk',
     ]
 if CONFIG['OS_TARGET'] == 'WINNT':
     DEFINES['WEBRTC_WIN'] = True
 else:
     DEFINES['WEBRTC_POSIX'] = True
+    # Must match build/gyp.mozbuild: enable_libevent
+    DEFINES['WEBRTC_BUILD_LIBEVENT'] = True
 
 if CONFIG['OS_TARGET'] == 'Android':
     DEFINES['WEBRTC_ANDROID'] = True
 
 
 if CONFIG['OS_TARGET'] == 'Android':
     EXPORTS += [
         'OpenSLESProvider.h'
--- a/dom/media/tests/mochitest/head.js
+++ b/dom/media/tests/mochitest/head.js
@@ -904,33 +904,51 @@ AudioStreamHelper.prototype = {
   checkAudioNotFlowing: function(stream) {
     var analyser = new AudioStreamAnalyser(this._context, stream);
     var freq = analyser.binIndexForFrequency(TEST_AUDIO_FREQ);
     return this.checkAudio(stream, analyser, array => array[freq] < 50);
   }
 }
 
 class VideoFrameEmitter {
-  constructor(color1, color2) {
-    this._helper = new CaptureStreamTestHelper2D(50,50);
+  constructor(color1, color2, size) {
+    if (!size) {
+      size = 50;
+    }
+    this._helper = new CaptureStreamTestHelper2D(size, size);
     this._canvas = this._helper.createAndAppendElement('canvas', 'source_canvas');
     this._color1 = color1 ? color1 : this._helper.green;
     this._color2 = color2 ? color2 : this._helper.red;
     // Make sure this is initted
     this._helper.drawColor(this._canvas, this._color1);
     this._stream = this._canvas.captureStream();
     this._started = false;
   }
 
   stream() {
     return this._stream;
   }
 
+  helper() {
+    return this._helper;
+  }
+
+  colors(color1, color2) {
+    this._color1 = color1 ? color1 : this._helper.green;
+    this._color2 = color2 ? color2 : this._helper.red;
+    try {
+      this._helper.drawColor(this._canvas, this._color1);
+    } catch (e) {
+      // ignore; stream might have shut down
+    }
+  }
+
   start() {
     if (this._started) {
+      info("*** emitter already started");
       return;
     }
 
     let i = 0;
     this._started = true;
     this._intervalId = setInterval(() => {
       try {
         this._helper.drawColor(this._canvas, i ? this._color1: this._color2);
--- a/dom/media/tests/mochitest/pc.js
+++ b/dom/media/tests/mochitest/pc.js
@@ -1440,16 +1440,17 @@ PeerConnectionWrapper.prototype = {
    * Wait for RTP packet flow for the given MediaStreamTrack.
    *
    * @param {object} track
    *        A MediaStreamTrack to wait for data flow on.
    * @returns {Promise}
    *        Returns a promise which yields a StatsReport object with RTP stats.
    */
   async waitForRtpFlow(track) {
+    info("waitForRtpFlow("+track.id+")");
     let hasFlow = (stats, retries) => {
       info("Checking for stats in " + JSON.stringify(stats) + " for " + track.kind
         + " track " + track.id + ", retry number " + retries);
       let rtp = stats.get([...Object.keys(stats)].find(key =>
         !stats.get(key).isRemote && stats.get(key).type.endsWith("bound-rtp")));
       if (!rtp) {
         return false;
       }
--- a/dom/media/tests/mochitest/test_peerConnection_captureStream_canvas_2d.html
+++ b/dom/media/tests/mochitest/test_peerConnection_captureStream_canvas_2d.html
@@ -20,49 +20,48 @@ runNetworkTest(() => {
   var canvas = document.createElement('canvas');
   var stream;
   canvas.id = 'source_canvas';
   canvas.width = canvas.height = 10;
   document.getElementById('content').appendChild(canvas);
 
   test.setMediaConstraints([{video: true}], []);
   test.chain.replace("PC_LOCAL_GUM", [
-    function PC_LOCAL_DRAW_INITIAL_LOCAL_GREEN(test) {
+    function PC_LOCAL_CANVAS_CAPTURESTREAM(test) {
       h.drawColor(canvas, h.green);
-    },
-    function PC_LOCAL_CANVAS_CAPTURESTREAM(test) {
       stream = canvas.captureStream(0);
       test.pcLocal.attachLocalStream(stream);
+      stream.requestFrame();
+      var i = 0;
+      return setInterval(function() {
+        try {
+          info("draw " + (i ? "green" : "red"));
+          h.drawColor(canvas, i ? h.green : h.red);
+          i = 1 - i;
+          stream.requestFrame();
+        } catch (e) {
+          // ignore; stream might have shut down, and we don't bother clearing
+          // the setInterval.
+        }
+      }, 500);
     }
   ]);
   test.chain.append([
     function PC_REMOTE_WAIT_FOR_REMOTE_GREEN() {
       mediaElement = test.pcRemote.remoteMediaElements[0];
       ok(!!mediaElement, "Should have remote video element for pcRemote");
       return h.waitForPixelColor(mediaElement, h.green, 128,
                                  "pcRemote's remote should become green");
     },
     function PC_LOCAL_DRAW_LOCAL_RED() {
       // After requesting a frame it will be captured at the time of next render.
       // Next render will happen at next stable state, at the earliest,
       // i.e., this order of `requestFrame(); draw();` should work.
       stream.requestFrame();
       h.drawColor(canvas, h.red);
-      var i = 0;
-      return setInterval(function() {
-        try {
-          info("draw " + i ? "green" : "red");
-          h.drawColor(canvas, i ? h.green : h.red);
-          i = 1 - i;
-          stream.requestFrame();
-        } catch (e) {
-          // ignore; stream might have shut down, and we don't bother clearing
-          // the setInterval.
-        }
-      }, 500);
     },
     function PC_REMOTE_WAIT_FOR_REMOTE_RED() {
       return h.waitForPixelColor(mediaElement, h.red, 128,
                                  "pcRemote's remote should become red");
     }
   ]);
   test.run();
 });
--- a/dom/media/tests/mochitest/test_peerConnection_captureStream_canvas_2d_noSSRC.html
+++ b/dom/media/tests/mochitest/test_peerConnection_captureStream_canvas_2d_noSSRC.html
@@ -22,49 +22,48 @@ runNetworkTest((options) => {
   var canvas = document.createElement('canvas');
   var stream;
   canvas.id = 'source_canvas';
   canvas.width = canvas.height = 10;
   document.getElementById('content').appendChild(canvas);
 
   test.setMediaConstraints([{video: true}], []);
   test.chain.replace("PC_LOCAL_GUM", [
-    function PC_LOCAL_DRAW_INITIAL_LOCAL_GREEN(test) {
+    function PC_LOCAL_CANVAS_CAPTURESTREAM(test) {
       h.drawColor(canvas, h.green);
-    },
-    function PC_LOCAL_CANVAS_CAPTURESTREAM(test) {
       stream = canvas.captureStream(0);
       test.pcLocal.attachLocalStream(stream);
+      stream.requestFrame();
+      var i = 0;
+      return setInterval(function() {
+        try {
+          info("draw " + (i ? "green" : "red"));
+          h.drawColor(canvas, i ? h.green : h.red);
+          i = 1 - i;
+          stream.requestFrame();
+        } catch (e) {
+          // ignore; stream might have shut down, and we don't bother clearing
+          // the setInterval.
+        }
+      }, 500);
     }
   ]);
   test.chain.append([
     function PC_REMOTE_WAIT_FOR_REMOTE_GREEN() {
       mediaElement = test.pcRemote.remoteMediaElements[0];
       ok(!!mediaElement, "Should have remote video element for pcRemote");
       return h.waitForPixelColor(mediaElement, h.green, 128,
                                  "pcRemote's remote should become green");
     },
     function PC_LOCAL_DRAW_LOCAL_RED() {
       // After requesting a frame it will be captured at the time of next render.
       // Next render will happen at next stable state, at the earliest,
       // i.e., this order of `requestFrame(); draw();` should work.
       stream.requestFrame();
       h.drawColor(canvas, h.red);
-      var i = 0;
-      return setInterval(function() {
-        try {
-          info("draw " + i ? "green" : "red");
-          h.drawColor(canvas, i ? h.green : h.red);
-          i = 1 - i;
-          stream.requestFrame();
-        } catch (e) {
-          // ignore; stream might have shut down, and we don't bother clearing
-          // the setInterval.
-        }
-      }, 500);
     },
     function PC_REMOTE_WAIT_FOR_REMOTE_RED() {
       return h.waitForPixelColor(mediaElement, h.red, 128,
                                  "pcRemote's remote should become red");
     }
   ]);
   test.run();
 });
--- a/dom/media/tests/mochitest/test_peerConnection_captureStream_canvas_webgl.html
+++ b/dom/media/tests/mochitest/test_peerConnection_captureStream_canvas_webgl.html
@@ -69,23 +69,33 @@ runNetworkTest(() => {
       gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
       squareBuffer.itemSize = 2;
       squareBuffer.numItems = 4;
 
       program.aPosition = gl.getAttribLocation(program, "aPosition");
       gl.enableVertexAttribArray(program.aPosition);
       gl.vertexAttribPointer(program.aPosition, squareBuffer.itemSize, gl.FLOAT, false, 0, 0);
     },
-    function DRAW_LOCAL_GREEN(test) {
+    function PC_LOCAL_CANVAS_CAPTURESTREAM(test) {
       h.drawColor(canvas, h.green);
-    },
-    function PC_LOCAL_CANVAS_CAPTURESTREAM(test) {
       test.pcLocal.canvasStream = canvas.captureStream(0.0);
       is(test.pcLocal.canvasStream.canvas, canvas, "Canvas attribute is correct");
       test.pcLocal.attachLocalStream(test.pcLocal.canvasStream);
+      var i = 0;
+      return setInterval(function() {
+        try {
+          info("draw " + (i ? "green" : "red"));
+          h.drawColor(canvas, i ? h.green : h.red);
+          i = 1 - i;
+          test.pcLocal.canvasStream.requestFrame();
+        } catch (e) {
+          // ignore; stream might have shut down, and we don't bother clearing
+          // the setInterval.
+        }
+      }, 500);
     }
   ]);
   test.chain.append([
     function FIND_REMOTE_VIDEO() {
       vremote = test.pcRemote.remoteMediaElements[0];
       ok(!!vremote, "Should have remote video element for pcRemote");
     },
     function WAIT_FOR_REMOTE_GREEN() {
@@ -95,26 +105,16 @@ runNetworkTest(() => {
     function REQUEST_FRAME(test) {
       // After requesting a frame it will be captured at the time of next render.
       // Next render will happen at next stable state, at the earliest,
       // i.e., this order of `requestFrame(); draw();` should work.
       test.pcLocal.canvasStream.requestFrame();
     },
     function DRAW_LOCAL_RED() {
       h.drawColor(canvas, h.red);
-      return setInterval(function() {
-        try {
-          info("draw");
-          h.drawColor(canvas, h.red);
-          test.pcLocal.canvasStream.requestFrame();
-        } catch (e) {
-          // ignore; stream might have shut down, and we don't bother clearing
-          // the setInterval.
-        }
-      }, 500);
     },
     function WAIT_FOR_REMOTE_RED() {
       return h.waitForPixelColor(vremote, h.red, 128,
                                  "pcRemote's remote should become red");
     }
   ]);
   test.run();
 });
--- a/dom/media/tests/mochitest/test_peerConnection_multiple_captureStream_canvas_2d.html
+++ b/dom/media/tests/mochitest/test_peerConnection_multiple_captureStream_canvas_2d.html
@@ -25,98 +25,71 @@ runNetworkTest(() => {
   var canvas1 = h.createAndAppendElement('canvas', 'source_canvas1');
 
   var vremote2;
   var stream2;
   var canvas2 = h.createAndAppendElement('canvas', 'source_canvas2');
 
   test.setMediaConstraints([{video: true}, {video: true}], []);
   test.chain.replace("PC_LOCAL_GUM", [
-    function DRAW_INITIAL_LOCAL1_GREEN(test) {
+    function PC_LOCAL_CANVAS_CAPTURESTREAM(test) {
       h.drawColor(canvas1, h.green);
-    },
-    function DRAW_INITIAL_LOCAL2_BLUE(test) {
       h.drawColor(canvas2, h.blue);
-    },
-    function PC_LOCAL_CANVAS_CAPTURESTREAM(test) {
       stream1 = canvas1.captureStream(0); // fps = 0 to capture single frame
       test.pcLocal.attachLocalStream(stream1);
       stream2 = canvas2.captureStream(0); // fps = 0 to capture single frame
       test.pcLocal.attachLocalStream(stream2);
+      var i = 0;
+      return setInterval(function() {
+        try {
+          info("draw " + (i ? "green" : "red/blue"));
+          h.drawColor(canvas1, i ? h.green : h.red);
+          h.drawColor(canvas2, i ? h.green : h.blue);
+          i = 1 - i;
+          stream1.requestFrame();
+          stream2.requestFrame();
+        } catch (e) {
+          // ignore; stream might have shut down, and we don't bother clearing
+          // the setInterval.
+        }
+      }, 500);
     }
   ]);
 
   test.chain.append([
     function CHECK_REMOTE_VIDEO() {
       is(test.pcRemote.remoteMediaElements.length, 2, "pcRemote Should have 2 remote media elements");
       vremote1 = test.pcRemote.remoteMediaElements[0];
       vremote2 = test.pcRemote.remoteMediaElements[1];
 
       // since we don't know which remote video is created first, we don't know
-      // which should be blue or green, but this will make sure that one is
+      // which should be blue or red, but this will make sure that one is
       // green and one is blue
       return Promise.race([
                Promise.all([
-                 h.waitForPixelColor(vremote1, h.green, 128,
-                                     "pcRemote's remote1 should become green"),
+                 h.waitForPixelColor(vremote1, h.red, 128,
+                                     "pcRemote's remote1 should become red"),
                  h.waitForPixelColor(vremote2, h.blue, 128,
                                      "pcRemote's remote2 should become blue")
                ]),
                Promise.all([
-                 h.waitForPixelColor(vremote2, h.green, 128,
-                                     "pcRemote's remote2 should become green"),
+                 h.waitForPixelColor(vremote2, h.red, 128,
+                                     "pcRemote's remote2 should become red"),
                  h.waitForPixelColor(vremote1, h.blue, 128,
                                      "pcRemote's remote1 should become blue")
                ])
              ]);
     },
-    function DRAW_LOCAL1_RED() {
-      // After requesting a frame it will be captured at the time of next render.
-      // Next render will happen at next stable state, at the earliest,
-      // i.e., this order of `requestFrame(); draw();` should work.
-      h.drawColor(canvas1, h.red);
-      stream1.requestFrame();
-      var i = 0;
-      return setInterval(function() {
-        try {
-          info("draw " + i ? "green" : "red");
-          h.drawColor(canvas1, i ? h.green : h.red);
-          i = 1 - i;
-          stream1.requestFrame();
-        } catch (e) {
-          // ignore; stream might have shut down, and we don't bother clearing
-          // the setInterval.
-        }
-      }, 500);
+    function WAIT_FOR_REMOTE_BOTH_GREEN() {
+      return Promise.all([
+               h.waitForPixelColor(vremote1, h.green, 128,
+                                   "pcRemote's remote1 should become green"),
+               h.waitForPixelColor(vremote2, h.green, 128,
+                                 "pcRemote's remote2 should become green")
+             ]);
     },
-    function DRAW_LOCAL2_RED() {
-      // After requesting a frame it will be captured at the time of next render.
-      // Next render will happen at next stable state, at the earliest,
-      // i.e., this order of `requestFrame(); draw();` should work.
-      h.drawColor(canvas2, h.red);
-      stream2.requestFrame();
-      return setInterval(function() {
-        try {
-          info("draw");
-          h.drawColor(canvas2, i ? h.green : h.red);
-          i = 1 - i;
-          stream2.requestFrame();
-        } catch (e) {
-          // ignore; stream might have shut down, and we don't bother clearing
-          // the setInterval.
-        }
-      }, 500);
-    },
-    function WAIT_FOR_REMOTE1_RED() {
-      return h.waitForPixelColor(vremote1, h.red, 128,
-                                 "pcRemote's remote1 should become red");
-    },
-    function WAIT_FOR_REMOTE2_RED() {
-      return h.waitForPixelColor(vremote2, h.red, 128,
-                                 "pcRemote's remote2 should become red");
-    }
   ]);
   test.run();
 });
 </script>
 </pre>
 </body>
 </html>
--- a/dom/media/tests/mochitest/test_peerConnection_renderAfterRenegotiation.html
+++ b/dom/media/tests/mochitest/test_peerConnection_renderAfterRenegotiation.html
@@ -31,28 +31,25 @@
       mustThrowWith("RTCTrackEvent wo/required args",
                     "TypeError", () => new RTCTrackEvent("track", {}));
       v2.srcObject = e.streams[0];
       resolve();
     }
   });
 
   runNetworkTest(function() {
-    var h = new CaptureStreamTestHelper2D();
-    var canvas = document.createElement('canvas');
-    canvas.id = 'source_canvas';
-    canvas.width = canvas.height = 10;
-    document.getElementById('content').appendChild(canvas);
-
     v2 = createMediaElement('video', 'v2');
     is(v2.currentTime, 0, "v2.currentTime is zero at outset");
 
-    h.drawColor(canvas, h.blue);
-    var stream = canvas.captureStream(0);
-    stream.getTracks().forEach(t => pc1.addTrack(t, stream));
+    const emitter = new VideoFrameEmitter(CaptureStreamTestHelper.prototype.blue,
+                                          CaptureStreamTestHelper.prototype.green,
+                                          10);
+    emitter.start();
+    emitter.stream().getTracks().forEach(t => pc1.addTrack(t, emitter.stream()));
+    let h = emitter.helper();
 
     pc1.createOffer({})
     .then(offer => pc1.setLocalDescription(offer))
     .then(() => pc2.setRemoteDescription(pc1.localDescription))
     .then(() => pc2.createAnswer({}))  // check that createAnswer accepts arg.
     .then(answer => pc2.setLocalDescription(answer))
     .then(() => pc1.setRemoteDescription(pc2.localDescription))
 
@@ -61,25 +58,26 @@
     .then(offer => pc1.setLocalDescription(offer))
     .then(() => pc2.setRemoteDescription(pc1.localDescription))
     .then(() => pc2.createAnswer({}))
     .then(answer => pc2.setLocalDescription(answer))
     .then(() => pc1.setRemoteDescription(pc2.localDescription))
     .then(() => delivered)
 
     // now verify that actually something gets rendered into the remote video
-    // element
+    // element.
     .then(() => h.waitForPixelColor(v2, h.blue, 128,
-                                    "pcRemote's video should become green"))
+                                    "pcRemote's video should become blue"))
+    // This will verify that new changes to the canvas propagate through
+    // the peerconnection
     .then(() => {
-      stream.requestFrame();
-      h.drawColor(canvas, h.red);
+      emitter.colors(h.red, h.green);
       })
     .then(() => h.waitForPixelColor(v2, h.red, 128,
-                                    "pcRemote's video should become green"))
+                                    "pcRemote's video should become red"))
 
     .catch(reason => ok(false, "unexpected failure: " + reason))
     .then(networkTestFinished);
   });
 </script>
 </pre>
 </body>
 </html>
--- a/dom/media/tests/mochitest/test_peerConnection_replaceVideoThenRenegotiate.html
+++ b/dom/media/tests/mochitest/test_peerConnection_replaceVideoThenRenegotiate.html
@@ -7,17 +7,20 @@
 <body>
 <pre id="test">
 <script type="application/javascript">
   createHTML({
     bug: "1017888",
     title: "Renegotiation: replaceTrack followed by adding a second video stream"
   });
 
+  const pushPrefs = (...p) => SpecialPowers.pushPrefEnv({set: p});
+
   runNetworkTest(function (options) {
+   pushPrefs(['media.peerconnection.video.min_bitrate_estimate', 180*1000]).then(() => {
     const test = new PeerConnectionTest(options);
     test.setMediaConstraints([{video:true}], [{video:true}]);
     const helper = new VideoStreamHelper();
     const emitter1 = new VideoFrameEmitter(CaptureStreamTestHelper.prototype.red,
                                            CaptureStreamTestHelper.prototype.green);
     const emitter2 = new VideoFrameEmitter(CaptureStreamTestHelper.prototype.blue,
                                            CaptureStreamTestHelper.prototype.grey);
     test.chain.replace("PC_LOCAL_GUM", [
@@ -76,13 +79,15 @@
             return Promise.reject(new Error("Couldn't find video element"));
           }
           return helper.checkVideoPlaying(vremote, 10, 10, 16);
         },
       ]
     );
 
     test.run();
+   });
   });
+
 </script>
 </pre>
 </body>
 </html>
--- a/dom/media/tests/mochitest/test_peerConnection_scaleResolution.html
+++ b/dom/media/tests/mochitest/test_peerConnection_scaleResolution.html
@@ -7,16 +7,18 @@
 <pre id="test">
 <script type="application/javascript">
   createHTML({
     bug: "1244913",
     title: "Scale resolution down on a PeerConnection",
     visible: true
   });
 
+  const pushPrefs = (...p) => SpecialPowers.pushPrefEnv({set: p});
+
   var mustRejectWith = (msg, reason, f) =>
     f().then(() => ok(false, msg),
              e => is(e.name, reason, msg));
 
   var removeAllButCodec = (d, codec) =>
     (d.sdp = d.sdp.replace(/m=video (\w) UDP\/TLS\/RTP\/SAVPF \w.*\r\n/,
                            "m=video $1 UDP/TLS/RTP/SAVPF " + codec + "\r\n"), d);
 
@@ -72,19 +74,21 @@
       .then(() => {
         stream.getTracks().forEach(track => track.stop());
         v1.srcObject = v2.srcObject = null;
       })
     })
     .catch(generateErrorCallback());
   }
 
-  if (!navigator.appVersion.includes("Android")) {
-    runNetworkTest(() => testScale("VP8").then(() => testScale("H264"))
-                   .then(networkTestFinished));
-  } else {
-    // No support for H.264 on Android in automation, see Bug 1355786
-    runNetworkTest(() => testScale("VP8").then(networkTestFinished));
-  }
+  pushPrefs(['media.peerconnection.video.lock_scaling', true]).then(() => {
+    if (!navigator.appVersion.includes("Android")) {
+      runNetworkTest(() => testScale("VP8").then(() => testScale("H264"))
+                    .then(networkTestFinished));
+    } else {
+      // No support for H.264 on Android in automation, see Bug 1355786
+      runNetworkTest(() => testScale("VP8").then(networkTestFinished));
+    }
+  });
 </script>
 </pre>
 </body>
 </html>
--- a/dom/media/tests/mochitest/test_peerConnection_simulcastAnswer.html
+++ b/dom/media/tests/mochitest/test_peerConnection_simulcastAnswer.html
@@ -42,16 +42,17 @@
       test = new PeerConnectionTest({bundle: false});
       test.setMediaConstraints([{video: true}], [{video: true}]);
 
       test.chain.replace("PC_REMOTE_GUM", [
         function PC_REMOTE_CANVAS_CAPTURESTREAM(test) {
           emitter = new VideoFrameEmitter();
           helper = new VideoStreamHelper();
           test.pcRemote.attachLocalStream(emitter.stream());
+          emitter.start();
         }
       ]);
 
       test.chain.insertAfter('PC_REMOTE_GET_OFFER', [
         function PC_REMOTE_SET_RIDS(test) {
           const senders = test.pcRemote._pc.getSenders();
           is(senders.length, 1, "We have exactly one RTP sender");
           const sender = senders[0];
--- a/dom/media/tests/mochitest/test_peerConnection_simulcastOffer.html
+++ b/dom/media/tests/mochitest/test_peerConnection_simulcastOffer.html
@@ -42,16 +42,17 @@
       const test = new PeerConnectionTest({bundle: false});
       test.setMediaConstraints([{video: true}], []);
 
       test.chain.replace("PC_LOCAL_GUM", [
         function PC_LOCAL_CANVAS_CAPTURESTREAM(test) {
           emitter = new VideoFrameEmitter();
           helper = new VideoStreamHelper();
           test.pcLocal.attachLocalStream(emitter.stream());
+          emitter.start();
         }
       ]);
 
       test.chain.insertBefore('PC_LOCAL_CREATE_OFFER', [
         function PC_LOCAL_SET_RIDS(test) {
           const senders = test.pcLocal._pc.getSenders();
           is(senders.length, 1, "We have exactly one RTP sender");
           const sender = senders[0];
--- a/dom/media/tests/mochitest/test_peerConnection_verifyVideoAfterRenegotiation.html
+++ b/dom/media/tests/mochitest/test_peerConnection_verifyVideoAfterRenegotiation.html
@@ -28,47 +28,44 @@ runNetworkTest(() => {
   test.setMediaConstraints([{video: true}], []);
   test.chain.replace("PC_LOCAL_GUM", [
     function DRAW_INITIAL_LOCAL_GREEN(test) {
       h1.drawColor(canvas1, h1.green);
     },
     function PC_LOCAL_CANVAS_CAPTURESTREAM(test) {
       stream1 = canvas1.captureStream(0);
       test.pcLocal.attachLocalStream(stream1);
+      var i = 0;
+      return setInterval(function() {
+        try {
+          info("draw " + (i ? "green" : "red"));
+          h1.drawColor(canvas1, i ? h1.green : h1.red);
+          i = 1 - i;
+          stream1.requestFrame();
+          if (stream2 != null) {
+            h2.drawColor(canvas2, i ? h2.green : h2.blue);
+            stream2.requestFrame();
+          }
+        } catch (e) {
+          // ignore; stream might have shut down, and we don't bother clearing
+          // the setInterval.
+        }
+      }, 500);
     }
   ]);
 
   test.chain.append([
     function FIND_REMOTE_VIDEO() {
       vremote1 = test.pcRemote.remoteMediaElements[0];
       ok(!!vremote1, "Should have remote video element for pcRemote");
     },
     function WAIT_FOR_REMOTE_GREEN() {
       return h1.waitForPixelColor(vremote1, h1.green, 128,
                                  "pcRemote's remote should become green");
     },
-    function DRAW_LOCAL_RED() {
-      // After requesting a frame it will be captured at the time of next render.
-      // Next render will happen at next stable state, at the earliest,
-      // i.e., this order of `requestFrame(); draw();` should work.
-      stream1.requestFrame();
-      h1.drawColor(canvas1, h1.red);
-      var i = 0;
-      return setInterval(function() {
-        try {
-          info("draw " + i ? "green" : "red");
-          h1.drawColor(canvas1, i ? h1.green : h1.red);
-          i = 1 - i;
-          stream1.requestFrame();
-        } catch (e) {
-          // ignore; stream might have shut down, and we don't bother clearing
-          // the setInterval.
-        }
-      }, 500);
-    },
     function WAIT_FOR_REMOTE_RED() {
       return h1.waitForPixelColor(vremote1, h1.red, 128,
                                  "pcRemote's remote should become red");
     }
   ]);
 
   addRenegotiation(test.chain,
     [
--- a/dom/media/tests/mochitest/test_peerConnection_videoRenegotiationInactiveAnswer.html
+++ b/dom/media/tests/mochitest/test_peerConnection_videoRenegotiationInactiveAnswer.html
@@ -18,24 +18,24 @@
     const emitter = new VideoFrameEmitter();
     const helper = new VideoStreamHelper();
 
     test = new PeerConnectionTest(options);
 
     test.chain.replace("PC_LOCAL_GUM", [
       function PC_LOCAL_CANVAS_CAPTURESTREAM(test) {
         test.pcLocal.attachLocalStream(emitter.stream());
+        emitter.start();
       }
     ]);
 
     test.chain.append([
       function PC_REMOTE_WAIT_FOR_FRAMES() {
         var vremote = test.pcRemote.remoteMediaElements[0];
         ok(vremote, "Should have remote video element for pcRemote");
-        emitter.start();
         return addFinallyToPromise(helper.checkVideoPlaying(vremote, 10, 10, 16))
             .finally(() => emitter.stop());
       }
     ]);
 
     addRenegotiation(test.chain, []);
 
     test.chain.insertAfter("PC_LOCAL_GET_ANSWER", [
--- a/dom/media/webrtc/MediaEngineRemoteVideoSource.h
+++ b/dom/media/webrtc/MediaEngineRemoteVideoSource.h
@@ -27,19 +27,16 @@
 #include "VideoSegment.h"
 #include "AudioSegment.h"
 #include "StreamTracks.h"
 #include "MediaStreamGraph.h"
 
 #include "MediaEngineWrapper.h"
 #include "mozilla/dom/MediaStreamTrackBinding.h"
 
-// WebRTC library includes follow
-#include "webrtc/common.h"
-
 // Camera Access via IPC
 #include "CamerasChild.h"
 
 #include "NullTransport.h"
 
 namespace mozilla {
 
 /**
--- a/dom/media/webrtc/MediaEngineWebRTC.cpp
+++ b/dom/media/webrtc/MediaEngineWebRTC.cpp
@@ -105,18 +105,16 @@ void AudioInputCubeb::UpdateDeviceList()
   mDevices = devices;
 }
 
 MediaEngineWebRTC::MediaEngineWebRTC(MediaEnginePrefs &aPrefs)
   : mMutex("mozilla::MediaEngineWebRTC"),
     mVoiceEngine(nullptr),
     mAudioInput(nullptr),
     mFullDuplex(aPrefs.mFullDuplex),
-    mExtendedFilter(aPrefs.mExtendedFilter),
-    mDelayAgnostic(aPrefs.mDelayAgnostic),
     mHasTabVideoSource(false)
 {
   nsCOMPtr<nsIComponentRegistrar> compMgr;
   NS_GetComponentRegistrar(getter_AddRefs(compMgr));
   if (compMgr) {
     compMgr->IsContractIDRegistered(NS_TABSOURCESERVICE_CONTRACTID, &mHasTabVideoSource);
   }
   // XXX
@@ -292,20 +290,17 @@ MediaEngineWebRTC::EnumerateAudioDevices
 
   if (webrtc::VoiceEngine::SetAndroidObjects(jvm, (void*)context) != 0) {
     LOG(("VoiceEngine:SetAndroidObjects Failed"));
     return;
   }
 #endif
 
   if (!mVoiceEngine) {
-    mConfig.Set<webrtc::ExtendedFilter>(new webrtc::ExtendedFilter(mExtendedFilter));
-    mConfig.Set<webrtc::DelayAgnostic>(new webrtc::DelayAgnostic(mDelayAgnostic));
-
-    mVoiceEngine = webrtc::VoiceEngine::Create(mConfig);
+    mVoiceEngine = webrtc::VoiceEngine::Create(/*mConfig*/);
     if (!mVoiceEngine) {
       return;
     }
   }
 
   ptrVoEBase = webrtc::VoEBase::GetInterface(mVoiceEngine);
   if (!ptrVoEBase) {
     return;
--- a/dom/media/webrtc/MediaEngineWebRTC.h
+++ b/dom/media/webrtc/MediaEngineWebRTC.h
@@ -35,17 +35,16 @@
 #include "CubebUtils.h"
 #include "AudioPacketizer.h"
 
 #include "MediaEngineWrapper.h"
 #include "mozilla/dom/MediaStreamTrackBinding.h"
 #include "CamerasChild.h"
 
 // WebRTC library includes follow
-#include "webrtc/common.h"
 // Audio Engine
 #include "webrtc/voice_engine/include/voe_base.h"
 #include "webrtc/voice_engine/include/voe_codec.h"
 #include "webrtc/voice_engine/include/voe_hardware.h"
 #include "webrtc/voice_engine/include/voe_network.h"
 #include "webrtc/voice_engine/include/voe_audio_processing.h"
 #include "webrtc/voice_engine/include/voe_volume_control.h"
 #include "webrtc/voice_engine/include/voe_external_media.h"
@@ -636,21 +635,18 @@ private:
     gFarendObserver = nullptr;
   }
 
   nsCOMPtr<nsIThread> mThread;
 
   // gUM runnables can e.g. Enumerate from multiple threads
   Mutex mMutex;
   webrtc::VoiceEngine* mVoiceEngine;
-  webrtc::Config mConfig;
   RefPtr<mozilla::AudioInput> mAudioInput;
   bool mFullDuplex;
-  bool mExtendedFilter;
-  bool mDelayAgnostic;
   bool mHasTabVideoSource;
 
   // Store devices we've already seen in a hashtable for quick return.
   // Maps UUID to MediaEngineSource (one set for audio, one for video).
   nsRefPtrHashtable<nsStringHashKey, MediaEngineVideoSource> mVideoSources;
   nsRefPtrHashtable<nsStringHashKey, MediaEngineAudioSource> mAudioSources;
 };
 
--- a/ipc/chromium/src/third_party/moz.build
+++ b/ipc/chromium/src/third_party/moz.build
@@ -49,9 +49,11 @@ if os_linux:
     if CONFIG['OS_TARGET'] != 'Android':
         UNIFIED_SOURCES += [
             'libevent/epoll_sub.c',
         ]
 
 # We allow warnings for third-party code that can be updated from upstream.
 ALLOW_COMPILER_WARNINGS = True
 
+Library('libevent')
+
 FINAL_LIBRARY = 'xul'
--- a/media/libvpx/moz.build
+++ b/media/libvpx/moz.build
@@ -81,20 +81,19 @@ FINAL_LIBRARY = 'gkmedias'
 
 DEFINES['HAVE_CONFIG_H'] = 'vpx_config.h'
 
 if CONFIG['OS_TARGET'] == 'Android':
     # Older versions of the Android NDK don't pre-define anything to indicate
     # the OS they're on, so do it for them.
     DEFINES['__linux__'] = True
 
-    if not CONFIG['MOZ_WEBRTC']:
-        SOURCES += [
-            '%%%s/sources/android/cpufeatures/cpu-features.c' % CONFIG['ANDROID_NDK'],
-        ]
+    SOURCES += [
+        '%%%s/sources/android/cpufeatures/cpu-features.c' % CONFIG['ANDROID_NDK'],
+    ]
 
 if CONFIG['CLANG_CL'] or not CONFIG['_MSC_VER']:
     for f in SOURCES:
         if f.endswith('.c'):
             if 'sse2.c' in f:
                 SOURCES[f].flags += CONFIG['SSE2_FLAGS']
             if 'ssse3.c' in f:
                 SOURCES[f].flags += ['-mssse3']
--- a/media/webrtc/moz.build
+++ b/media/webrtc/moz.build
@@ -22,35 +22,39 @@ webrtc_non_unified_sources = [
     'trunk/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_c.c', # Because of name clash in the kDampFilter variable
     'trunk/webrtc/modules/audio_coding/neteq/audio_vector.cc',                   # Because of explicit template specializations
     'trunk/webrtc/modules/audio_device/android/audio_manager.cc',                # Because of TAG redefinition
     'trunk/webrtc/modules/audio_device/android/audio_record_jni.cc',             # Becuse of commonly named module static vars
     'trunk/webrtc/modules/audio_device/android/audio_track_jni.cc',              # Becuse of commonly named module static vars
     'trunk/webrtc/modules/audio_device/android/opensles_player.cc',              # Because of TAG redefinition
     'trunk/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc',       # Because of LATE()
     'trunk/webrtc/modules/audio_device/linux/audio_mixer_manager_pulse_linux.cc',# Because of LATE()
-    'trunk/webrtc/modules/audio_device/opensl/single_rw_fifo.cc',                # Because of name clash with #define FF
     'trunk/webrtc/modules/audio_device/win/audio_device_core_win.cc',            # Because of ordering assumptions in strsafe.h
-    'trunk/webrtc/modules/audio_processing/aec/aec_core.c',                      # Because of name clash in the ComfortNoise function
-    'trunk/webrtc/modules/audio_processing/aecm/aecm_core.c',                    # Because of name clash in the ComfortNoise function
-    'trunk/webrtc/modules/audio_processing/aecm/echo_control_mobile.c',          # Because of name clash in the kInitCheck variable
-    'trunk/webrtc/modules/audio_processing/agc/histogram.cc',                    # Because of duplicate definition of static consts with pitch_based_vad.cc
+    'trunk/webrtc/modules/audio_processing/aec/echo_cancellation.cc',            # Because of conflicts over 'near' on windows
+    'trunk/webrtc/modules/audio_processing/aecm/aecm_core.cc',                   # Because of the PART_LEN2 define
+    'trunk/webrtc/modules/audio_processing/aecm/aecm_core_c.cc',                 # Because of the PART_LEN2 define
+    'trunk/webrtc/modules/audio_processing/aecm/echo_control_mobile.cc',         # Because of the PART_LEN2 define
     'trunk/webrtc/modules/audio_processing/agc/legacy/analog_agc.c',             # Because of name clash in the kInitCheck variable
     'trunk/webrtc/modules/audio_processing/beamformer/covariance_matrix_generator.cc', # Because of needing to define _USE_MATH_DEFINES before including <cmath>
+    'trunk/webrtc/modules/audio_processing/beamformer/covariance_matrix_generator.cc',  # Because of needing to define _USE_MATH_DEFINES before including <cmath>
     'trunk/webrtc/modules/audio_processing/beamformer/nonlinear_beamformer.cc',  # Because of needing to define _USE_MATH_DEFINES before including <cmath>
     'trunk/webrtc/modules/audio_processing/echo_cancellation_impl.cc',           # Because of name clash in the MapError function
     'trunk/webrtc/modules/audio_processing/echo_control_mobile_impl.cc',         # Because of name clash in the MapError function
+    'trunk/webrtc/modules/audio_processing/echo_detector/normalized_covariance_estimator.cc', # Because of kAlpha
     'trunk/webrtc/modules/audio_processing/gain_control_impl.cc',                # Because of name clash in the Handle typedef
-    'trunk/webrtc/modules/audio_processing/high_pass_filter_impl.cc',            # Because of name clash in the Handle typedef
     'trunk/webrtc/modules/audio_processing/noise_suppression_impl.cc',           # Because of name clash in the Handle typedef
+    'trunk/webrtc/modules/audio_processing/rms_level.cc',                        # Because of name clash in the kMinLevel variable
+    'trunk/webrtc/modules/congestion_controller/trendline_estimator.cc',         # Because of name clash in kDeltaCounterMax
+    'trunk/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.cc',       # Because base/logging.h uses #ifndef LOG before defining anything
     'trunk/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc', # Because of duplicate definitions of static consts against remote_bitrate_estimator_abs_send_time.cc
-    'trunk/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn.cc',                  # Because of identically named functions and vars between tmmbr.cc and tmmbn.cc in an anonymous namespaces
-    'trunk/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr.cc',                  # Because of identically named functions and vars between tmmbr.cc and tmmbn.cc in an anonymous namespaces
-    'trunk/webrtc/modules/video_capture/android/device_info_android.cc',         # Because of duplicate module static variable names
-    'trunk/webrtc/modules/video_capture/android/video_capture_android.cc',       # Because of duplicate module static variable names
+    'trunk/webrtc/modules/rtp_rtcp/source/flexfec_receiver.cc',                  # Because of identically named functions and vars between flexfec_receiver.cc and flexfec_sender.cc in an anonymous namespaces
+    'trunk/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn.cc',                 # Because of identically named functions and vars between tmmbr.cc and tmmbn.cc in an anonymous namespaces
+    'trunk/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr.cc',                 # Because of identically named functions and vars between tmmbr.cc and tmmbn.cc in an anonymous namespaces
+    'trunk/webrtc/modules/rtp_rtcp/source/ulpfec_generator.cc',                  # Because of identically named constant kRedForFecHeaderLength in an anonymous namespace
+    'trunk/webrtc/modules/video_capture/windows/BaseFilter.cpp',                 # Because it locally defines NS_IF_ADDREF/RELEASE
     'trunk/webrtc/modules/video_capture/windows/device_info_ds.cc',              # Because of the MEDIASUBTYPE_HDYC variable
     'trunk/webrtc/modules/video_capture/windows/help_functions_ds.cc',           # Because of initguid.h
     'trunk/webrtc/modules/video_capture/windows/sink_filter_ds.cc',              # Because of the MEDIASUBTYPE_HDYC variable and initguid.h
     'trunk/webrtc/video/overuse_frame_detector.cc',                              # Because of name clash with call_stats.cc on kWeightFactor
 ]
 
 GYP_DIRS += ['trunk']
 
@@ -68,17 +72,17 @@ GYP_DIRS['trunk'].input = 'trunk/peercon
 GYP_DIRS['trunk'].variables = gyp_vars_copy
 # We allow warnings for third-party code that can be updated from upstream.
 GYP_DIRS['trunk'].sandbox_vars['ALLOW_COMPILER_WARNINGS'] = True
 GYP_DIRS['trunk'].sandbox_vars['FINAL_LIBRARY'] = 'webrtc'
 GYP_DIRS['trunk'].non_unified_sources += webrtc_non_unified_sources
 
 if CONFIG['ENABLE_TESTS']:
     TEST_DIRS += [
-        'trunk/gtest',
+#        'trunk/gtest',
     ]
 
 if CONFIG['MOZ_WEBRTC_SIGNALING']:
     GYP_DIRS += ['signaling']
     GYP_DIRS['signaling'].input = 'signaling/signaling.gyp'
     GYP_DIRS['signaling'].variables = gyp_vars_copy.copy()
     GYP_DIRS['signaling'].variables.update(
         build_for_test=0,
@@ -127,9 +131,10 @@ if CONFIG['MOZ_WEBRTC_SIGNALING']:
             CXXFLAGS += ['-Wno-invalid-source-encoding']
         else:
             CXXFLAGS += ['-validate-charset-']
 
     if CONFIG['ENABLE_TESTS']:
         TEST_DIRS += [
             'signaling/fuzztest',
             'signaling/gtest',
+            'trunk/gtest',
         ]
--- a/media/webrtc/signaling/signaling.gyp
+++ b/media/webrtc/signaling/signaling.gyp
@@ -298,16 +298,17 @@
           'defines': [
             'OS_LINUX',
             'SIP_OS_LINUX',
             'WEBRTC_POSIX',
             '_GNU_SOURCE',
             'LINUX',
             'GIPS_VER=3510',
             'SECLIB_OPENSSL',
+            'WEBRTC_BUILD_LIBEVENT',
           ],
 
           'cflags_mozilla': [
           ],
         }],
         ['OS=="android" or moz_widget_toolkit_gonk==1', {
           'cflags_mozilla': [
             # This warning complains about important MOZ_EXPORT attributes
@@ -345,16 +346,17 @@
           'cflags_mozilla': [
           ],
         }],
         ['OS=="mac" or OS=="ios"', {
           'include_dirs': [
           ],
           'defines': [
             'WEBRTC_POSIX',
+            'WEBRTC_MAC',
             'OS_MACOSX',
             'SIP_OS_OSX',
             'OSX',
             '_FORTIFY_SOURCE=2',
           ],
 
           'cflags_mozilla': [
           ],
--- a/media/webrtc/signaling/src/common/NullTransport.h
+++ b/media/webrtc/signaling/src/common/NullTransport.h
@@ -3,17 +3,17 @@
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 
 #ifndef NULL_TRANSPORT_H_
 #define NULL_TRANSPORT_H_
 
 #include "mozilla/Attributes.h"
 
-#include "webrtc/transport.h"
+#include "webrtc/api/call/transport.h"
 
 namespace mozilla {
 
 /**
  * NullTransport is registered as ExternalTransport to throw away data
  */
 class NullTransport : public webrtc::Transport
 {
--- a/media/webrtc/signaling/src/common/YuvStamper.cpp
+++ b/media/webrtc/signaling/src/common/YuvStamper.cpp
@@ -4,16 +4,17 @@
 
 #ifdef HAVE_NETINET_IN_H
 #include <netinet/in.h>
 #elif defined XP_WIN
 #include <winsock2.h>
 #endif
 #include <string.h>
 
+#include "plarena.h"
 #include "nspr.h"
 #include "YuvStamper.h"
 #include "mozilla/Sprintf.h"
 
 typedef uint32_t UINT4; //Needed for r_crc32() call
 extern "C" {
 #include "r_crc32.h"
 }
--- a/media/webrtc/signaling/src/common/browser_logging/WebRtcLog.cpp
+++ b/media/webrtc/signaling/src/common/browser_logging/WebRtcLog.cpp
@@ -127,20 +127,16 @@ void ConfigWebRtcLog(mozilla::LogLevel l
     return;
   }
 
 #if defined(ANDROID)
   // Special case: use callback to pipe to NSPR logging.
   aLogFile.Assign(default_log_name);
 #else
 
-  // always capture LOG(...) << ... logging in webrtc.org code to nspr logs
-  if (!sSink) {
-    sSink = new LogSinkImpl();
-  }
   rtc::LoggingSeverity log_level;
   switch (level) {
     case mozilla::LogLevel::Verbose:
       log_level = rtc::LoggingSeverity::LS_VERBOSE;
       break;
     case mozilla::LogLevel::Debug:
     case mozilla::LogLevel::Info:
       log_level = rtc::LoggingSeverity::LS_INFO;
@@ -153,27 +149,42 @@ void ConfigWebRtcLog(mozilla::LogLevel l
       break;
     case mozilla::LogLevel::Disabled:
       log_level = rtc::LoggingSeverity::LS_NONE;
       break;
     default:
       MOZ_ASSERT(false);
       break;
   }
-  rtc::LogMessage::AddLogToStream(sSink, log_level);
+  rtc::LogMessage::LogToDebug(log_level);
+  if (level != mozilla::LogLevel::Disabled) {
+    // always capture LOG(...) << ... logging in webrtc.org code to nspr logs
+    if (!sSink) {
+      sSink = new LogSinkImpl();
+      rtc::LogMessage::AddLogToStream(sSink, log_level);
+      // it's ok if this leaks to program end
+    }
+  } else if (sSink) {
+    rtc::LogMessage::RemoveLogToStream(sSink);
+    sSink = nullptr;
+  }
 
   webrtc::Trace::set_level_filter(trace_mask);
   if (trace_mask != 0) {
     // default WEBRTC_TRACE logs to a rotating file, but allow redirecting to nspr
     // XXX always redirect in e10s if the sandbox blocks file access, or somehow proxy
-    if (aLogFile.EqualsLiteral("nspr")) {
+    if (aLogFile.EqualsLiteral("nspr") || aLogFile.EqualsLiteral("moz_log")) {
+      rtc::LogMessage::SetLogToStderr(false);
       webrtc::Trace::SetTraceCallback(&gWebRtcCallback);
     } else {
+      rtc::LogMessage::SetLogToStderr(true);
       webrtc::Trace::SetTraceFile(aLogFile.get(), multi_log);
     }
+  } else {
+    rtc::LogMessage::SetLogToStderr(false);
   }
 
   if (aLogFile.IsEmpty()) {
     nsCOMPtr<nsIFile> tempDir;
     nsresult rv = NS_GetSpecialDirectory(NS_OS_TEMP_DIR, getter_AddRefs(tempDir));
     if (NS_SUCCEEDED(rv)) {
       tempDir->AppendNative(default_log_name);
       tempDir->GetNativePath(aLogFile);
@@ -231,16 +242,18 @@ void EnableWebRtcLog()
   nsAutoCString aec_log_dir;
 
   GetWebRtcLogPrefs(&trace_mask, &log_file, &aec_log_dir, &multi_log);
   mozilla::LogLevel level = CheckOverrides(&trace_mask, &log_file, &multi_log);
   ConfigWebRtcLog(level, trace_mask, log_file, aec_log_dir, multi_log);
   return;
 }
 
+// Called when we destroy the singletons from PeerConnectionCtx or if the
+// user changes logging in about:webrtc
 void StopWebRtcLog()
 {
   // TODO(NG) strip/fix gWebRtcTraceLoggingOn which is never set to true
   webrtc::Trace::set_level_filter(webrtc::kTraceNone);
   webrtc::Trace::SetTraceCallback(nullptr);
   webrtc::Trace::SetTraceFile(nullptr);
   if (sSink) {
     rtc::LogMessage::RemoveLogToStream(sSink);
--- a/media/webrtc/signaling/src/jsep/JsepSessionImpl.cpp
+++ b/media/webrtc/signaling/src/jsep/JsepSessionImpl.cpp
@@ -1,28 +1,30 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
-#include "logging.h"
-
-#include "webrtc/config.h"
 #include "signaling/src/jsep/JsepSessionImpl.h"
+
 #include <string>
 #include <set>
 #include <bitset>
 #include <stdlib.h>
 
+#include "plarena.h"
 #include "nspr.h"
 #include "nss.h"
 #include "pk11pub.h"
 #include "nsDebug.h"
-
-#include <mozilla/Move.h>
-#include <mozilla/UniquePtr.h>
+#include "logging.h"
+
+#include "mozilla/Move.h"
+#include "mozilla/UniquePtr.h"
+
+#include "webrtc/config.h"
 
 #include "signaling/src/jsep/JsepTrack.h"
 #include "signaling/src/jsep/JsepTrack.h"
 #include "signaling/src/jsep/JsepTransport.h"
 #include "signaling/src/sdp/Sdp.h"
 #include "signaling/src/sdp/SipccSdp.h"
 #include "signaling/src/sdp/SipccSdpParser.h"
 #include "mozilla/net/DataChannelProtocol.h"
--- a/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
+++ b/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
@@ -16,20 +16,18 @@
 #include "mozilla/Services.h"
 #include "nsServiceManagerUtils.h"
 #include "nsIPrefService.h"
 #include "nsIPrefBranch.h"
 #include "nsThreadUtils.h"
 #include "Latency.h"
 #include "mozilla/Telemetry.h"
 
-#include "webrtc/common.h"
 #include "webrtc/modules/audio_processing/include/audio_processing.h"
 #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
-#include "webrtc/voice_engine/include/voe_dtmf.h"
 #include "webrtc/voice_engine/include/voe_errors.h"
 #include "webrtc/voice_engine/voice_engine_impl.h"
 #include "webrtc/system_wrappers/include/clock.h"
 
 #ifdef MOZ_WIDGET_ANDROID
 #include "AndroidJNIWrapper.h"
 #endif
 
@@ -85,16 +83,17 @@ WebrtcAudioConduit::~WebrtcAudioConduit(
     mPtrVoENetwork->DeRegisterExternalTransport(mChannel);
   }
 
   if(mPtrVoEBase)
   {
     mPtrVoEBase->StopPlayout(mChannel);
     mPtrVoEBase->StopSend(mChannel);
     mPtrVoEBase->StopReceive(mChannel);
+    mChannelProxy = nullptr;
     mPtrVoEBase->DeleteChannel(mChannel);
     // We don't Terminate() the VoEBase here, because the Call (owned by
     // PeerConnectionMedia) actually owns the (shared) VoEBase/VoiceEngine
     // here
   }
 
   // We shouldn't delete the VoiceEngine until all these are released!
   // And we can't use a Scoped ptr, since the order is arbitrary
@@ -209,33 +208,36 @@ NTPtoDOMHighResTimeStamp(uint32_t ntpHig
 }
 
 bool WebrtcAudioConduit::GetRTCPReceiverReport(DOMHighResTimeStamp* timestamp,
                                                uint32_t* jitterMs,
                                                uint32_t* packetsReceived,
                                                uint64_t* bytesReceived,
                                                uint32_t* cumulativeLost,
                                                int32_t* rttMs) {
-  uint32_t ntpHigh, ntpLow;
-  uint16_t fractionLost;
-  bool result = !mPtrRTP->GetRemoteRTCPReceiverInfo(mChannel, ntpHigh, ntpLow,
-                                                    *packetsReceived,
-                                                    *bytesReceived,
-                                                    *jitterMs,
-                                                    fractionLost,
-                                                    *cumulativeLost,
-                                                    *rttMs);
-  // Note: rrtMs is 0 when unavailable before the VoE rework. It is likely
-  // that after the audio moves to the new Call API that rttMs will be -1
-  // when unavailable.
-  if (!result) {
-    return false;
-  }
-  // Note: timestamp is not correct per the spec... should be time the rtcp
-  // was received (remote) or sent (local)
+
+  // We get called on STS thread... the proxy thread-checks to MainThread
+  // I removed the check, since GetRTCPStatistics ends up going down to
+  // methods (rtp_receiver_->SSRC() and rtp_receive_statistics_->GetStatistician()
+  // and GetStatistics that internally lock, so we're ok here without a thread-check.
+  webrtc::CallStatistics call_stats = mChannelProxy->GetRTCPStatistics();
+  *bytesReceived = call_stats.bytesReceived;
+  *packetsReceived = call_stats.packetsReceived;
+  *cumulativeLost = call_stats.cumulativeLost;
+  *rttMs = call_stats.rttMs;
+
+  unsigned int averageJitterMs;
+  unsigned int maxJitterMs;
+  unsigned int discardedPackets;
+  unsigned int cumulative;
+  mChannelProxy->GetRTPStatistics(averageJitterMs, maxJitterMs, discardedPackets, cumulative);
+  *jitterMs = averageJitterMs;
+
+  // XXX Note: timestamp is not correct per the spec... should be time the
+  // rtcp was received (remote) or sent (local)
   *timestamp = webrtc::Clock::GetRealTimeClock()->TimeInMilliseconds();
   return true;
 }
 
 bool WebrtcAudioConduit::GetRTCPSenderReport(DOMHighResTimeStamp* timestamp,
                                              unsigned int* packetsSent,
                                              uint64_t* bytesSent) {
   webrtc::RTCPSenderInfo senderInfo;
@@ -248,45 +250,40 @@ bool WebrtcAudioConduit::GetRTCPSenderRe
     *timestamp = NTPtoDOMHighResTimeStamp(senderInfo.NTPseconds,
                                           senderInfo.NTPfraction);
     *packetsSent = senderInfo.sendPacketCount;
     *bytesSent = senderInfo.sendOctetCount;
    }
    return result;
  }
 
-bool WebrtcAudioConduit::SetDtmfPayloadType(unsigned char type) {
+bool WebrtcAudioConduit::SetDtmfPayloadType(unsigned char type, int freq) {
   CSFLogInfo(logTag, "%s : setting dtmf payload %d", __FUNCTION__, (int)type);
 
-  ScopedCustomReleasePtr<webrtc::VoEDtmf> mPtrVoEDtmf;
-  mPtrVoEDtmf = webrtc::VoEDtmf::GetInterface(mVoiceEngine);
-  if (!mPtrVoEDtmf) {
-    CSFLogError(logTag, "%s Unable to initialize VoEDtmf", __FUNCTION__);
-    return false;
-  }
-
-  int result = mPtrVoEDtmf->SetSendTelephoneEventPayloadType(mChannel, type);
+  int result = mChannelProxy->SetSendTelephoneEventPayloadType(type, freq);
   if (result == -1) {
-    CSFLogError(logTag, "%s Failed call to SetSendTelephoneEventPayloadType",
-                        __FUNCTION__);
+    CSFLogError(logTag, "%s Failed call to SetSendTelephoneEventPayloadType(%u, %d)",
+                __FUNCTION__, type, freq);
   }
   return result != -1;
 }
 
 bool WebrtcAudioConduit::InsertDTMFTone(int channel, int eventCode,
                                         bool outOfBand, int lengthMs,
                                         int attenuationDb) {
   NS_ASSERTION(!NS_IsMainThread(), "Do not call on main thread");
 
   if (!mVoiceEngine || !mDtmfEnabled) {
     return false;
   }
 
-  webrtc::VoiceEngineImpl* s = static_cast<webrtc::VoiceEngineImpl*>(mVoiceEngine);
-  int result = s->SendTelephoneEvent(channel, eventCode, outOfBand, lengthMs, attenuationDb);
+  int result = 0;
+  if (outOfBand){
+    result = mChannelProxy->SendTelephoneEventOutband(eventCode, lengthMs);
+  }
   return result != -1;
 }
 
 /*
  * WebRTCAudioConduit Implementation
  */
 MediaConduitErrorCode WebrtcAudioConduit::Init()
 {
@@ -363,16 +360,20 @@ MediaConduitErrorCode WebrtcAudioConduit
     return kMediaConduitSessionNotInited;
   }
 
   if( (mChannel = mPtrVoEBase->CreateChannel()) == -1)
   {
     CSFLogError(logTag, "%s VoiceEngine Channel creation failed",__FUNCTION__);
     return kMediaConduitChannelError;
   }
+  // Needed to access TelephoneEvent APIs in 57 if we're not using Call/audio_send_stream/etc
+  webrtc::VoiceEngineImpl* s = static_cast<webrtc::VoiceEngineImpl*>(mVoiceEngine);
+  mChannelProxy = s->GetChannelProxy(mChannel);
+  MOZ_ASSERT(mChannelProxy);
 
   CSFLogDebug(logTag, "%s Channel Created %d ",__FUNCTION__, mChannel);
 
   if(mPtrVoENetwork->RegisterExternalTransport(mChannel, *this) == -1)
   {
     CSFLogError(logTag, "%s VoiceEngine, External Transport Failed",__FUNCTION__);
     return kMediaConduitTransportRegistrationFail;
   }
--- a/media/webrtc/signaling/src/media-conduit/AudioConduit.h
+++ b/media/webrtc/signaling/src/media-conduit/AudioConduit.h
@@ -10,26 +10,27 @@
 #include "mozilla/TimeStamp.h"
 #include "nsTArray.h"
 
 #include "MediaConduitInterface.h"
 #include "MediaEngineWrapper.h"
 
 // Audio Engine Includes
 #include "webrtc/common_types.h"
-#include "webrtc/transport.h"
 #include "webrtc/voice_engine/include/voe_base.h"
 #include "webrtc/voice_engine/include/voe_volume_control.h"
 #include "webrtc/voice_engine/include/voe_codec.h"
 #include "webrtc/voice_engine/include/voe_file.h"
 #include "webrtc/voice_engine/include/voe_network.h"
 #include "webrtc/voice_engine/include/voe_external_media.h"
 #include "webrtc/voice_engine/include/voe_audio_processing.h"
 #include "webrtc/voice_engine/include/voe_video_sync.h"
 #include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
+#include "webrtc/voice_engine/channel_proxy.h"
+
 //Some WebRTC types for short notations
  using webrtc::VoEBase;
  using webrtc::VoENetwork;
  using webrtc::VoECodec;
  using webrtc::VoEExternalMedia;
  using webrtc::VoEAudioProcessing;
  using webrtc::VoEVideoSync;
  using webrtc::VoERTP_RTCP;
@@ -234,17 +235,17 @@ public:
                              uint32_t* packetsReceived,
                              uint64_t* bytesReceived,
                              uint32_t *cumulativeLost,
                              int32_t* rttMs) override;
   bool GetRTCPSenderReport(DOMHighResTimeStamp* timestamp,
                            unsigned int* packetsSent,
                            uint64_t* bytesSent) override;
 
-  bool SetDtmfPayloadType(unsigned char type) override;
+  bool SetDtmfPayloadType(unsigned char type, int freq) override;
 
   bool InsertDTMFTone(int channel, int eventCode, bool outOfBand,
                       int lengthMs, int attenuationDb) override;
 
 private:
   WebrtcAudioConduit(const WebrtcAudioConduit& other) = delete;
   void operator=(const WebrtcAudioConduit& other) = delete;
 
@@ -296,16 +297,17 @@ private:
   // (for when we send data to MediaStreamTracks).  Blocks are aged out as needed.
   struct Processing {
     TimeStamp mTimeStamp;
     uint32_t mRTPTimeStamp; // RTP timestamps received
   };
   AutoTArray<Processing,8> mProcessing;
 
   int mChannel;
+  std::unique_ptr<webrtc::voe::ChannelProxy> mChannelProxy;
   bool mDtmfEnabled;
   RecvCodecList    mRecvCodecList;
 
   Mutex mCodecMutex; // protects mCurSendCodecConfig
   nsAutoPtr<AudioCodecConfig> mCurSendCodecConfig;
 
   // Current "capture" delay (really output plus input delay)
   int32_t mCaptureDelay;
--- a/media/webrtc/signaling/src/media-conduit/GmpVideoCodec.cpp
+++ b/media/webrtc/signaling/src/media-conduit/GmpVideoCodec.cpp
@@ -1,12 +1,14 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
+#include "plarena.h"
+
 #include "WebrtcGmpVideoCodec.h"
 #include "GmpVideoCodec.h"
 
 namespace mozilla {
 
 WebrtcVideoEncoder* GmpVideoCodec::CreateEncoder() {
   return new WebrtcVideoEncoderProxy();
 }
--- a/media/webrtc/signaling/src/media-conduit/MediaConduitInterface.h
+++ b/media/webrtc/signaling/src/media-conduit/MediaConduitInterface.h
@@ -16,59 +16,68 @@
 #include "VideoTypes.h"
 #include "MediaConduitErrors.h"
 
 #include "ImageContainer.h"
 
 #include "webrtc/call.h"
 #include "webrtc/config.h"
 #include "webrtc/common_types.h"
+#include "webrtc/common_types.h"
+#include "webrtc/api/video/video_frame_buffer.h"
+#include "webrtc/logging/rtc_event_log/rtc_event_log.h"
 
 #include <vector>
 
 namespace webrtc {
 class VideoFrame;
 }
 
 namespace mozilla {
 
 // Wrap the webrtc.org Call class adding mozilla add/ref support.
 class WebRtcCallWrapper : public RefCounted<WebRtcCallWrapper>
 {
 public:
   typedef webrtc::Call::Config Config;
 
-  static RefPtr<WebRtcCallWrapper> Create(const Config& config)
+  static RefPtr<WebRtcCallWrapper> Create()
   {
-    return new WebRtcCallWrapper(webrtc::Call::Create(config));
+    return new WebRtcCallWrapper();
   }
 
   webrtc::Call* Call() const
   {
     return mCall.get();
   }
 
   virtual ~WebRtcCallWrapper()
   {
     if (mCall->voice_engine()) {
       webrtc::VoiceEngine* voice_engine = mCall->voice_engine();
       mCall.reset(nullptr); // Force it to release the voice engine reference
       // Delete() must be after all refs are released
       webrtc::VoiceEngine::Delete(voice_engine);
+    } else {
+      // Must ensure it's destroyed *before* the EventLog!
+      mCall.reset(nullptr);
     }
   }
 
   MOZ_DECLARE_REFCOUNTED_TYPENAME(WebRtcCallWrapper)
 
 private:
-  WebRtcCallWrapper() = delete;
-  explicit WebRtcCallWrapper(webrtc::Call* aCall)
-    : mCall(aCall) {}
+  WebRtcCallWrapper()
+  {
+    webrtc::Call::Config config(&mEventLog);
+    mCall.reset(webrtc::Call::Create(config));
+  }
   DISALLOW_COPY_AND_ASSIGN(WebRtcCallWrapper);
   UniquePtr<webrtc::Call> mCall;
+  webrtc::RtcEventLogNullImpl mEventLog;
 };
 
 
 /**
  * Abstract Interface for transporting RTP packets - audio/vidoeo
  * The consumers of this interface are responsible for passing in
  * the RTPfied media packets
  */
@@ -147,25 +156,26 @@ public:
    * responsibility of the concrete implementations of this class to own copy
    * of the frame if needed for time longer than scope of this callback.
    * Such implementations should be quick in processing the frames and return
    * immediately.
    * On the other hand, if decoded video frame is passed through handle, the
    * implementations should keep a reference to the (ref-counted) image object
    * inside until it's no longer needed.
    */
-  virtual void RenderVideoFrame(const unsigned char* buffer,
-                                size_t buffer_size,
+  virtual void RenderVideoFrame(const webrtc::VideoFrameBuffer& buffer,
                                 uint32_t time_stamp,
                                 int64_t render_time,
                                 const ImageHandle& handle) = 0;
-  virtual void RenderVideoFrame(const unsigned char* buffer,
-                                size_t buffer_size,
+  virtual void RenderVideoFrame(const uint8_t* buffer_y,
                                 uint32_t y_stride,
-                                uint32_t cbcr_stride,
+                                const uint8_t* buffer_u,
+                                uint32_t u_stride,
+                                const uint8_t* buffer_v,
+                                uint32_t v_stride,
                                 uint32_t time_stamp,
                                 int64_t render_time,
                                 const ImageHandle& handle) = 0;
 
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(VideoRenderer)
 };
 
 
@@ -535,16 +545,16 @@ public:
    /**
     * Function to enable the audio level extension
     * @param enabled: enable extension
     * @param id: id to be used for this rtp header extension
     * NOTE: See AudioConduit for more information
     */
   virtual MediaConduitErrorCode EnableAudioLevelExtension(bool enabled, uint8_t id) = 0;
 
-  virtual bool SetDtmfPayloadType(unsigned char type) = 0;
+  virtual bool SetDtmfPayloadType(unsigned char type, int freq) = 0;
 
   virtual bool InsertDTMFTone(int channel, int eventCode, bool outOfBand,
                               int lengthMs, int attenuationDb) = 0;
 
 };
 }
 #endif
--- a/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
+++ b/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
@@ -2,17 +2,16 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "CSFLog.h"
 #include "nspr.h"
 #include "plstr.h"
 
 #include "AudioConduit.h"
-#include "LoadManager.h"
 #include "VideoConduit.h"
 #include "YuvStamper.h"
 #include "mozilla/TemplateLib.h"
 #include "mozilla/media/MediaUtils.h"
 #include "nsComponentManagerUtils.h"
 #include "nsIPrefBranch.h"
 #include "nsIGfxInfo.h"
 #include "nsIPrefService.h"
@@ -20,16 +19,25 @@
 
 #include "nsThreadUtils.h"
 
 #include "pk11pub.h"
 
 #include "webrtc/common_types.h"
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
+#include "webrtc/modules/video_coding/codecs/vp9/include/vp9.h"
+#include "webrtc/common_video/include/video_frame_buffer.h"
+#include "webrtc/api/video/i420_buffer.h"
+#if defined(MAC_OS_X_VERSION_10_8) && \
+  (MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_8)
+// XXX not available in Mac 10.7 SDK
+#include "webrtc/sdk/objc/Framework/Classes/corevideo_frame_buffer.h"
+#endif
 
 #include "mozilla/Unused.h"
 
 #if defined(MOZ_WIDGET_ANDROID)
 #include "AndroidJNIWrapper.h"
 #include "VideoEngine.h"
 #endif
 
@@ -77,16 +85,56 @@ const unsigned int WebrtcVideoConduit::C
 static const int kViEMinCodecBitrate_bps = KBPS(30);
 
 template<typename T>
 T MinIgnoreZero(const T& a, const T& b)
 {
   return std::min(a? a:b, b? b:a);
 }
 
+template <class t>
+static void
+ConstrainPreservingAspectRatioExact(uint32_t max_fs, t* width, t* height)
+{
+  // We could try to pick a better starting divisor, but it won't make any real
+  // performance difference.
+  for (size_t d = 1; d < std::min(*width, *height); ++d) {
+    if ((*width % d) || (*height % d)) {
+      continue; // Not divisible
+    }
+
+    if (((*width) * (*height)) / (d * d) <= max_fs) {
+      *width /= d;
+      *height /= d;
+      return;
+    }
+  }
+
+  *width = 0;
+  *height = 0;
+}
+
+template <class t>
+static void
+ConstrainPreservingAspectRatio(uint16_t max_width, uint16_t max_height,
+                               t* width, t* height)
+{
+  if (((*width) <= max_width) && ((*height) <= max_height)) {
+    return;
+  }
+
+  if ((*width) * max_height > max_width * (*height)) {
+    (*height) = max_width * (*height) / (*width);
+    (*width) = max_width;
+  } else {
+    (*width) = max_height * (*width) / (*height);
+    (*height) = max_height;
+  }
+}
+
 void
 WebrtcVideoConduit::StreamStatistics::Update(const double aFrameRate,
                                              const double aBitrate)
 {
   mFrameRate.Push(aFrameRate);
   mBitrate.Push(aBitrate);
 }
 
@@ -169,16 +217,18 @@ VideoSessionConduit::Create(RefPtr<WebRt
   }
   CSFLogVerbose(logTag, "%s Successfully created VideoConduit ", __FUNCTION__);
   return obj.forget();
 }
 
 WebrtcVideoConduit::WebrtcVideoConduit(RefPtr<WebRtcCallWrapper> aCall)
   : mTransportMonitor("WebrtcVideoConduit")
   , mRenderer(nullptr)
+  , mVideoAdapter(1)
+  , mVideoBroadcaster()
   , mEngineTransmitting(false)
   , mEngineReceiving(false)
   , mCapId(-1)
   , mCodecMutex("VideoConduit codec db")
   , mInReconfig(false)
   , mRecvStream(nullptr)
   , mSendStream(nullptr)
   , mLastWidth(0)
@@ -192,16 +242,20 @@ WebrtcVideoConduit::WebrtcVideoConduit(R
   , mNumReceivingStreams(1)
   , mVideoLatencyTestEnable(false)
   , mVideoLatencyAvg(0)
   , mMinBitrate(0)
   , mStartBitrate(0)
   , mPrefMaxBitrate(0)
   , mNegotiatedMaxBitrate(0)
   , mMinBitrateEstimate(0)
+  , mDenoising(false)
+  , mLockScaling(false)
+  , mSpatialLayers(1)
+  , mTemporalLayers(1)
   , mCodecMode(webrtc::kRealtimeVideo)
   , mCall(aCall) // refcounted store of the call object
   , mSendStreamConfig(this) // 'this' is stored but not  dereferenced in the constructor.
   , mRecvStreamConfig(this) // 'this' is stored but not  dereferenced in the constructor.
   , mRecvSSRC(0)
   , mRecvSSRCSetInProgress(false)
   , mSendCodecPlugin(nullptr)
   , mRecvCodecPlugin(nullptr)
@@ -312,100 +366,79 @@ WebrtcVideoConduit::ConfigureCodecMode(w
       mode == webrtc::VideoCodecMode::kScreensharing) {
     mCodecMode = mode;
     return kMediaConduitNoError;
   }
 
   return kMediaConduitMalformedArgument;
 }
 
-webrtc::VideoEncoder::EncoderType
-PayloadNameToEncoderType(const std::string& name)
-{
-  if ("VP8" == name) {
-    return webrtc::VideoEncoder::EncoderType::kVp8;
-  } else if ("VP9" == name) { // NOLINT(readability-else-after-return)
-    return webrtc::VideoEncoder::EncoderType::kVp9;
-  } else if ("H264" == name) { // NOLINT(readability-else-after-return)
-    return webrtc::VideoEncoder::EncoderType::kH264;
-  }
-  return webrtc::VideoEncoder::EncoderType::kUnsupportedCodec;
-}
-
 void
 WebrtcVideoConduit::DeleteSendStream()
 {
   mCodecMutex.AssertCurrentThreadOwns();
   if (mSendStream) {
-
-    if (mLoadManager && mSendStream->LoadStateObserver()) {
-      mLoadManager->RemoveObserver(mSendStream->LoadStateObserver());
-    }
-
     mCall->Call()->DestroyVideoSendStream(mSendStream);
     mSendStream = nullptr;
     mEncoder = nullptr;
   }
 }
 
+webrtc::VideoCodecType
+SupportedCodecType(webrtc::VideoCodecType aType)
+{
+  switch (aType) {
+    case webrtc::VideoCodecType::kVideoCodecVP8:
+    case webrtc::VideoCodecType::kVideoCodecVP9:
+    case webrtc::VideoCodecType::kVideoCodecH264:
+      return aType;
+    default:
+      return webrtc::VideoCodecType::kVideoCodecUnknown;
+  }
+  // NOTREACHED
+}
+
 MediaConduitErrorCode
 WebrtcVideoConduit::CreateSendStream()
 {
   mCodecMutex.AssertCurrentThreadOwns();
 
-  webrtc::VideoEncoder::EncoderType encoder_type =
-    PayloadNameToEncoderType(mSendStreamConfig.encoder_settings.payload_name);
-  if (encoder_type == webrtc::VideoEncoder::EncoderType::kUnsupportedCodec) {
+  webrtc::VideoCodecType encoder_type =
+    SupportedCodecType(
+      webrtc::PayloadNameToCodecType(mSendStreamConfig.encoder_settings.payload_name)
+        .value_or(webrtc::VideoCodecType::kVideoCodecUnknown));
+  if (encoder_type == webrtc::VideoCodecType::kVideoCodecUnknown) {
     return kMediaConduitInvalidSendCodec;
   }
 
   nsAutoPtr<webrtc::VideoEncoder> encoder(
     CreateEncoder(encoder_type, mEncoderConfig.StreamCount() > 0));
   if (!encoder) {
     return kMediaConduitInvalidSendCodec;
   }
 
   mSendStreamConfig.encoder_settings.encoder = encoder.get();
 
-  MOZ_ASSERT(mSendStreamConfig.rtp.ssrcs.size() == mEncoderConfig.StreamCount(),
+  MOZ_RELEASE_ASSERT(mEncoderConfig.NumberOfStreams() != 0,
+                     "mEncoderConfig - There are no configured streams!");
+  MOZ_ASSERT(mSendStreamConfig.rtp.ssrcs.size() == mEncoderConfig.NumberOfStreams(),
              "Each video substream must have a corresponding ssrc.");
 
-  auto cfg = mEncoderConfig.GenerateConfig();
-  if (cfg.streams.empty()) {
-    MOZ_CRASH("mEncoderConfig.GenerateConfig().streams.empty() == true, there are no configured streams!");
-  }
-
-  mSendStream = mCall->Call()->CreateVideoSendStream(mSendStreamConfig, cfg);
+  mSendStream = mCall->Call()->CreateVideoSendStream(mSendStreamConfig.Copy(), mEncoderConfig.CopyConfig());
 
   if (!mSendStream) {
     return kMediaConduitVideoSendStreamError;
   }
+  mSendStream->SetSource(this, webrtc::VideoSendStream::DegradationPreference::kBalanced);
 
   mEncoder = encoder;
 
-  if (mLoadManager && mSendStream->LoadStateObserver()) {
-    mLoadManager->AddObserver(mSendStream->LoadStateObserver());
-  }
-
   return kMediaConduitNoError;
 }
 
-webrtc::VideoDecoder::DecoderType
-PayloadNameToDecoderType(const std::string& name)
-{
-  if ("VP8" == name) {
-    return webrtc::VideoDecoder::DecoderType::kVp8;
-  } else if ("VP9" == name) { // NOLINT(readability-else-after-return)
-    return webrtc::VideoDecoder::DecoderType::kVp9;
-  } else if ("H264" == name) { // NOLINT(readability-else-after-return)
-    return webrtc::VideoDecoder::DecoderType::kH264;
-  }
-  return webrtc::VideoDecoder::DecoderType::kUnsupportedCodec;
-}
-
 void
 WebrtcVideoConduit::DeleteRecvStream()
 {
   mCodecMutex.AssertCurrentThreadOwns();
   if (mRecvStream) {
     mCall->Call()->DestroyVideoReceiveStream(mRecvStream);
     mRecvStream = nullptr;
     mDecoders.clear();
@@ -414,22 +447,23 @@ WebrtcVideoConduit::DeleteRecvStream()
 
 MediaConduitErrorCode
 WebrtcVideoConduit::CreateRecvStream()
 {
   mCodecMutex.AssertCurrentThreadOwns();
 
   webrtc::VideoReceiveStream::Decoder decoder_desc;
   std::unique_ptr<webrtc::VideoDecoder> decoder;
-  webrtc::VideoDecoder::DecoderType decoder_type;
+  webrtc::VideoCodecType decoder_type;
 
   mRecvStreamConfig.decoders.clear();
   for (auto& config : mRecvCodecList) {
-    decoder_type = PayloadNameToDecoderType(config->mName);
-    if (decoder_type == webrtc::VideoDecoder::DecoderType::kUnsupportedCodec) {
+    decoder_type = SupportedCodecType(webrtc::PayloadNameToCodecType(config->mName)
+                                      .value_or(webrtc::VideoCodecType::kVideoCodecUnknown));
+    if (decoder_type == webrtc::VideoCodecType::kVideoCodecUnknown) {
       CSFLogError(logTag, "%s Unknown decoder type: %s", __FUNCTION__,
                   config->mName.c_str());
       continue;
     }
 
     decoder.reset(CreateDecoder(decoder_type));
 
     if (!decoder) {
@@ -441,38 +475,184 @@ WebrtcVideoConduit::CreateRecvStream()
       // don't stop
       continue;
     }
 
     decoder_desc.decoder = decoder.get();
     mDecoders.push_back(std::move(decoder));
     decoder_desc.payload_name = config->mName;
     decoder_desc.payload_type = config->mType;
+    // XXX Ok, add:
+    // Set decoder_desc.codec_params (fmtp)
     mRecvStreamConfig.decoders.push_back(decoder_desc);
   }
 
-  mRecvStream = mCall->Call()->CreateVideoReceiveStream(mRecvStreamConfig);
-
+  mRecvStream = mCall->Call()->CreateVideoReceiveStream(mRecvStreamConfig.Copy());
   if (!mRecvStream) {
     mDecoders.clear();
     return kMediaConduitUnknownError;
   }
+  CSFLogDebug(logTag, "Created VideoReceiveStream %p for SSRC %u (0x%x)",
+              mRecvStream, mRecvStreamConfig.rtp.remote_ssrc, mRecvStreamConfig.rtp.remote_ssrc);
 
   return kMediaConduitNoError;
 }
 
-static bool CompatibleH264Config(const webrtc::VideoCodecH264& aEncoderSpecificH264,
-                                 const VideoCodecConfig& aCodecConfig)
+static rtc::scoped_refptr<webrtc::VideoEncoderConfig::EncoderSpecificSettings>
+ConfigureVideoEncoderSettings(const VideoCodecConfig* aConfig,
+                              const WebrtcVideoConduit* aConduit)
+{
+  bool is_screencast = aConduit->CodecMode() == webrtc::VideoCodecMode::kScreensharing;
+  // No automatic resizing when using simulcast or screencast.
+  bool automatic_resize = !is_screencast && aConfig->mSimulcastEncodings.size() <= 1;
+  bool frame_dropping = !is_screencast;
+  bool denoising;
+  bool codec_default_denoising = false;
+  if (is_screencast) {
+    denoising = false;
+  } else {
+    // Use codec default if video_noise_reduction is unset.
+    denoising = aConduit->Denoising();
+    codec_default_denoising = !denoising;
+  }
+
+  if (aConfig->mName == "H264") {
+    webrtc::VideoCodecH264 h264_settings =
+        webrtc::VideoEncoder::GetDefaultH264Settings();
+    h264_settings.frameDroppingOn = frame_dropping;
+    h264_settings.packetizationMode = aConfig->mPacketizationMode;
+    return new rtc::RefCountedObject<
+        webrtc::VideoEncoderConfig::H264EncoderSpecificSettings>(h264_settings);
+
+  } else if (aConfig->mName == "VP8") {
+    webrtc::VideoCodecVP8 vp8_settings =
+        webrtc::VideoEncoder::GetDefaultVp8Settings();
+    vp8_settings.automaticResizeOn = automatic_resize;
+    // VP8 denoising is enabled by default.
+    vp8_settings.denoisingOn = codec_default_denoising ? true : denoising;
+    vp8_settings.frameDroppingOn = frame_dropping;
+    return new rtc::RefCountedObject<
+        webrtc::VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
+
+  } else if (aConfig->mName == "VP9") {
+    webrtc::VideoCodecVP9 vp9_settings =
+        webrtc::VideoEncoder::GetDefaultVp9Settings();
+    if (is_screencast) {
+      // TODO(asapersson): Set to 2 for now since there is a DCHECK in
+      // VideoSendStream::ReconfigureVideoEncoder.
+      vp9_settings.numberOfSpatialLayers = 2;
+    } else {
+      vp9_settings.numberOfSpatialLayers = aConduit->SpatialLayers();
+    }
+    // VP9 denoising is disabled by default.
+    vp9_settings.denoisingOn = codec_default_denoising ? false : denoising;
+    vp9_settings.frameDroppingOn = frame_dropping;
+    return new rtc::RefCountedObject<
+        webrtc::VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
+  }
+  return nullptr;
+}
+
+std::vector<webrtc::VideoStream>
+WebrtcVideoConduit::VideoStreamFactory::CreateEncoderStreams(int width, int height,
+                                                             const webrtc::VideoEncoderConfig& config)
 {
-  if (aEncoderSpecificH264.profile_byte != aCodecConfig.mProfile ||
-      aEncoderSpecificH264.constraints != aCodecConfig.mConstraints ||
-      aEncoderSpecificH264.packetizationMode != aCodecConfig.mPacketizationMode) {
-    return false;
+  auto streamCount = config.number_of_streams;
+  std::vector<webrtc::VideoStream> streams;
+  streams.reserve(streamCount);
+  MOZ_ASSERT(mConduit);
+  MutexAutoLock lock(mConduit->mCodecMutex); // for mCurSendCodecConfig
+
+  // XXX webrtc.org code has a restriction on simulcast layers that each
+  // layer must be 1/2 the dimension of the previous layer - not sure why.
+  // This means we can't use scaleResolutionBy/scaleDownBy (yet), even if
+  // the user specified it.  The one exception is that we can apply it on
+  // the full-resolution stream (which also happens to handle the
+  // non-simulcast usage case). NOTE: we make an assumption here, not in the
+  // spec, that the first stream is the full-resolution stream.
+  auto& simulcastEncoding = mConduit->mCurSendCodecConfig->mSimulcastEncodings[0];
+#if 0
+  // XXX What we'd like to do for each simulcast stream...
+  if (simulcastEncoding.constraints.scaleDownBy > 1.0) {
+    uint32_t new_width = width / simulcastEncoding.constraints.scaleDownBy;
+    uint32_t new_height = height / simulcastEncoding.constraints.scaleDownBy;
+
+    if (new_width != width || new_height != height) {
+      if (streamCount == 1) {
+        CSFLogVerbose(logTag, "%s: ConstrainPreservingAspectRatio", __FUNCTION__);
+        // Use less strict scaling in unicast. That way 320x240 / 3 = 106x79.
+        ConstrainPreservingAspectRatio(new_width, new_height,
+                                       &width, &height);
+      } else {
+        CSFLogVerbose(logTag, "%s: ConstrainPreservingAspectRatioExact", __FUNCTION__);
+        // webrtc.org supposedly won't tolerate simulcast unless every stream
+        // is exactly the same aspect ratio. 320x240 / 3 = 80x60.
+        ConstrainPreservingAspectRatioExact(new_width * new_height,
+                                            &width, &height);
+      }
+    }
   }
-  return true;
+#endif
+
+  for (size_t idx = streamCount - 1; streamCount > 0; idx--, streamCount--) {
+    webrtc::VideoStream video_stream;
+    // Stream dimensions must be divisible by 2^(n-1), where n is the number of layers.
+    // Each lower-resolution layer is 1/2^(n-1) of the size of the largest layer,
+    // where n is the number of the layer
+
+    // width/height will be overridden on the first frame; they must be 'sane' for
+    // SetSendCodec()
+    video_stream.width = width >> idx;
+    video_stream.height = height >> idx;
+    // We want to ensure this picks up the current framerate, so indirect
+    video_stream.max_framerate = mConduit->mSendingFramerate;
+
+    simulcastEncoding = mConduit->mCurSendCodecConfig->mSimulcastEncodings[idx];
+    MOZ_ASSERT(simulcastEncoding.constraints.scaleDownBy >= 1.0);
+
+    // leave vector temporal_layer_thresholds_bps empty
+    video_stream.temporal_layer_thresholds_bps.clear();
+    // Calculate these first
+    video_stream.max_bitrate_bps = MinIgnoreZero(simulcastEncoding.constraints.maxBr,
+                                                 kDefaultMaxBitrate_bps);
+    video_stream.max_bitrate_bps = MinIgnoreZero((int) mConduit->mPrefMaxBitrate*1000,
+                                                 video_stream.max_bitrate_bps);
+    video_stream.min_bitrate_bps = (mConduit->mMinBitrate ?
+                                    mConduit->mMinBitrate : kDefaultMinBitrate_bps);
+    if (video_stream.min_bitrate_bps > video_stream.max_bitrate_bps) {
+      video_stream.min_bitrate_bps = video_stream.max_bitrate_bps;
+    }
+    video_stream.target_bitrate_bps = (mConduit->mStartBitrate ?
+                                       mConduit->mStartBitrate : kDefaultStartBitrate_bps);
+    if (video_stream.target_bitrate_bps > video_stream.max_bitrate_bps) {
+      video_stream.target_bitrate_bps = video_stream.max_bitrate_bps;
+    }
+    if (video_stream.target_bitrate_bps < video_stream.min_bitrate_bps) {
+      video_stream.target_bitrate_bps = video_stream.min_bitrate_bps;
+    }
+    // We should use SelectBitrates here for the case of already-sending and no reconfig needed;
+    // overrides the calculations above
+    if (mConduit->mSendingWidth) { // cleared if we need a reconfig
+      mConduit->SelectBitrates(video_stream.width, video_stream.height, // use video_stream.foo!
+                               simulcastEncoding.constraints.maxBr,
+                               mConduit->mLastFramerateTenths, video_stream);
+    }
+
+    video_stream.max_qp = kQpMax;
+    video_stream.SetRid(simulcastEncoding.rid);
+
+    if (mConduit->mCurSendCodecConfig->mName == "H264") {
+      if (mConduit->mCurSendCodecConfig->mEncodingConstraints.maxMbps > 0) {
+        // Not supported yet!
+        CSFLogError(logTag, "%s H.264 max_mbps not supported yet", __FUNCTION__);
+      }
+    }
+    streams.push_back(video_stream);
+  }
+  return streams;
 }
 
 /**
  * Note: Setting the send-codec on the Video Engine will restart the encoder,
  * sets up new SSRC and reset RTP_RTCP module with the new codec setting.
  *
  * Note: this is called from MainThread, and the codec settings are read on
  * videoframe delivery threads (i.e in SendVideoFrame().  With
@@ -498,18 +678,16 @@ WebrtcVideoConduit::ConfigureSendMediaCo
                                 (size_t)webrtc::kMaxSimulcastStreams);
   CSFLogDebug(logTag, "%s for VideoConduit:%p stream count:%d", __FUNCTION__,
               this, static_cast<int>(streamCount));
 
   mSendingFramerate = 0;
   mEncoderConfig.ClearStreams();
   mSendStreamConfig.rtp.rids.clear();
 
-  unsigned short width = 320;
-  unsigned short height = 240;
   int max_framerate;
   if (codecConfig->mEncodingConstraints.maxFps > 0) {
     max_framerate = codecConfig->mEncodingConstraints.maxFps;
   } else {
     max_framerate = DEFAULT_VIDEO_MAX_FRAMERATE;
   }
   // apply restrictions from maxMbps/etc
   mSendingFramerate = SelectSendFrameRate(codecConfig,
@@ -539,175 +717,119 @@ WebrtcVideoConduit::ConfigureSendMediaCo
       mLastWidth = 0;
       mLastHeight = 0;
       mSendingWidth = 0;
       mSendingHeight = 0;
     } else {
       // We're already in a call but changes don't require a reconfiguration.
       // We update the resolutions in the send codec to match the current
       // settings.  Framerate is already set.
-      width = mSendingWidth;
-      height = mSendingHeight;
-      // Bitrates are set in the loop below
     }
   } else if (mMinBitrateEstimate) {
     // Only do this at the start; use "have we send a frame" as a reasonable stand-in.
     // min <= start <= max (which can be -1, note!)
     webrtc::Call::Config::BitrateConfig config;
     config.min_bitrate_bps = mMinBitrateEstimate;
     if (config.start_bitrate_bps < mMinBitrateEstimate) {
       config.start_bitrate_bps = mMinBitrateEstimate;
     }
     if (config.max_bitrate_bps > 0 &&
         config.max_bitrate_bps < mMinBitrateEstimate) {
       config.max_bitrate_bps = mMinBitrateEstimate;
     }
     mCall->Call()->SetBitrateConfig(config);
   }
 
-  for (size_t idx = streamCount - 1; streamCount > 0; idx--, streamCount--) {
-    webrtc::VideoStream video_stream;
-    VideoEncoderConfigBuilder::SimulcastStreamConfig simulcast_config;
-    // Stream dimensions must be divisable by 2^(n-1), where n is the number of layers.
-    // Each lower resolution layer is 1/2^(n-1) of the size of largest layer,
-    // where n is the number of the layer
+  // NOTE: the lifetime of this object MUST be less than the lifetime of the Conduit
+  mEncoderConfig.SetVideoStreamFactory(
+    new rtc::RefCountedObject<WebrtcVideoConduit::VideoStreamFactory>(
+      codecConfig->mName, this));
 
-    // width/height will be overridden on the first frame; they must be 'sane' for
-    // SetSendCodec()
-    video_stream.width = width >> idx;
-    video_stream.height = height >> idx;
-    video_stream.max_framerate = mSendingFramerate;
-    auto& simulcastEncoding = codecConfig->mSimulcastEncodings[idx];
-    // The underlying code (as of 49 and 57) actually ignores the values in
-    // the array, and uses the size of the array + 1.  Chrome uses 3 for
-    // temporal layers when simulcast is in use (see simulcast.cc)
-    video_stream.temporal_layer_thresholds_bps.resize(streamCount > 1 ? 3 : 1);
-    // Calculate these first
-    video_stream.max_bitrate_bps = MinIgnoreZero(simulcastEncoding.constraints.maxBr,
-                                                 kDefaultMaxBitrate_bps);
-    video_stream.max_bitrate_bps = MinIgnoreZero((int) mPrefMaxBitrate,
-                                                 video_stream.max_bitrate_bps);
-    video_stream.min_bitrate_bps = (mMinBitrate ? mMinBitrate : kDefaultMinBitrate_bps);
-    if (video_stream.min_bitrate_bps > video_stream.max_bitrate_bps) {
-      video_stream.min_bitrate_bps = video_stream.max_bitrate_bps;
-    }
-    video_stream.target_bitrate_bps = (mStartBitrate ? mStartBitrate : kDefaultStartBitrate_bps);
-    if (video_stream.target_bitrate_bps > video_stream.max_bitrate_bps) {
-      video_stream.target_bitrate_bps = video_stream.max_bitrate_bps;
-    }
-    if (video_stream.target_bitrate_bps < video_stream.min_bitrate_bps) {
-      video_stream.target_bitrate_bps = video_stream.min_bitrate_bps;
-    }
-    // We should use SelectBitrates here for the case of already-sending and no reconfig needed;
-    // overrides the calculations above
-    if (mSendingWidth) { // cleared if we need a reconfig
-      SelectBitrates(video_stream.width, video_stream.height,
-                     simulcastEncoding.constraints.maxBr,
-                     mLastFramerateTenths, video_stream);
-    }
+  // Always call this to ensure it's reset
+  mVideoAdapter.OnScaleResolutionBy(
+    (streamCount >= 1 && codecConfig->mSimulcastEncodings[0].constraints.scaleDownBy > 1.0) ?
+    rtc::Optional<float>(codecConfig->mSimulcastEncodings[0].constraints.scaleDownBy) :
+    rtc::Optional<float>());
 
-    video_stream.max_qp = kQpMax;
-    simulcast_config.jsScaleDownBy = simulcastEncoding.constraints.scaleDownBy;
-    simulcast_config.jsMaxBitrate = simulcastEncoding.constraints.maxBr; // bps
-    mSendStreamConfig.rtp.rids.push_back(simulcastEncoding.rid);
-
-    if (codecConfig->mName == "H264") {
-      if (codecConfig->mEncodingConstraints.maxMbps > 0) {
-        // Not supported yet!
-        CSFLogError(logTag, "%s H.264 max_mbps not supported yet", __FUNCTION__);
-      }
-    }
-    mEncoderConfig.AddStream(video_stream, simulcast_config);
-  }
-
-  if (codecConfig->mName == "H264") {
-#ifdef MOZ_WEBRTC_OMX
-    mEncoderConfig.SetResolutionDivisor(16);
-#else
-    mEncoderConfig.SetResolutionDivisor(1);
-#endif
-    mEncoderSpecificH264 = webrtc::VideoEncoder::GetDefaultH264Settings();
-    mEncoderSpecificH264.profile_byte = codecConfig->mProfile;
-    mEncoderSpecificH264.constraints = codecConfig->mConstraints;
-    mEncoderSpecificH264.level = codecConfig->mLevel;
-    mEncoderSpecificH264.packetizationMode = codecConfig->mPacketizationMode;
-    mEncoderSpecificH264.scaleDownBy = codecConfig->mEncodingConstraints.scaleDownBy;
-
-    // XXX parse the encoded SPS/PPS data
-    // paranoia
-    mEncoderSpecificH264.spsData = nullptr;
-    mEncoderSpecificH264.spsLen = 0;
-    mEncoderSpecificH264.ppsData = nullptr;
-    mEncoderSpecificH264.ppsLen = 0;
-
-    mEncoderConfig.SetEncoderSpecificSettings(&mEncoderSpecificH264);
-  } else {
-    mEncoderConfig.SetEncoderSpecificSettings(nullptr);
-    mEncoderConfig.SetResolutionDivisor(1);
-  }
+  // XXX parse the encoded SPS/PPS data and set spsData/spsLen/ppsData/ppsLen
+  mEncoderConfig.SetEncoderSpecificSettings(ConfigureVideoEncoderSettings(codecConfig, this));
+  mEncoderConfig.SetResolutionDivisor(1);
 
   mEncoderConfig.SetContentType(mCodecMode == webrtc::kRealtimeVideo ?
     webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo :
     webrtc::VideoEncoderConfig::ContentType::kScreen);
   // for the GMP H.264 encoder/decoder!!
   mEncoderConfig.SetMinTransmitBitrateBps(0);
+  // Expected max number of encodings
+  mEncoderConfig.SetMaxEncodings(codecConfig->mSimulcastEncodings.size());
 
   // If only encoder stream attibutes have been changed, there is no need to stop,
   // create a new webrtc::VideoSendStream, and restart.
   // Recreating on PayloadType change may be overkill, but is safe.
   if (mSendStream) {
     if (!RequiresNewSendStream(*codecConfig)) {
-      if (!mSendStream->ReconfigureVideoEncoder(mEncoderConfig.GenerateConfig())) {
-        CSFLogError(logTag, "%s: ReconfigureVideoEncoder failed", __FUNCTION__);
-        // Don't return here; let it try to destroy the encoder and rebuild it
-        // on StartTransmitting()
-      } else {
-        return kMediaConduitNoError;
-      }
+      mSendStream->ReconfigureVideoEncoder(mEncoderConfig.CopyConfig());
+      return kMediaConduitNoError;
     }
 
     condError = StopTransmitting();
     if (condError != kMediaConduitNoError) {
       return condError;
     }
 
     // This will cause a new encoder to be created by StartTransmitting()
     MutexAutoLock lock(mCodecMutex);
     DeleteSendStream();
   }
 
   mSendStreamConfig.encoder_settings.payload_name = codecConfig->mName;
   mSendStreamConfig.encoder_settings.payload_type = codecConfig->mType;
   mSendStreamConfig.rtp.rtcp_mode = webrtc::RtcpMode::kCompound;
   mSendStreamConfig.rtp.max_packet_size = kVideoMtu;
-  mSendStreamConfig.overuse_callback = mLoadManager.get();
 
   // See Bug 1297058, enabling FEC when basic NACK is to be enabled in H.264 is problematic
   if (codecConfig->RtcpFbFECIsSet() &&
       !(codecConfig->mName == "H264" && codecConfig->RtcpFbNackIsSet(""))) {
-    mSendStreamConfig.rtp.fec.ulpfec_payload_type = codecConfig->mULPFECPayloadType;
-    mSendStreamConfig.rtp.fec.red_payload_type = codecConfig->mREDPayloadType;
-    mSendStreamConfig.rtp.fec.red_rtx_payload_type = codecConfig->mREDRTXPayloadType;
+    mSendStreamConfig.rtp.ulpfec.ulpfec_payload_type = codecConfig->mULPFECPayloadType;
+    mSendStreamConfig.rtp.ulpfec.red_payload_type = codecConfig->mREDPayloadType;
+    mSendStreamConfig.rtp.ulpfec.red_rtx_payload_type = codecConfig->mREDRTXPayloadType;
   }
 
   mSendStreamConfig.rtp.nack.rtp_history_ms =
     codecConfig->RtcpFbNackIsSet("") ? 1000 : 0;
 
   {
     MutexAutoLock lock(mCodecMutex);
     // Copy the applied config for future reference.
     mCurSendCodecConfig = new VideoCodecConfig(*codecConfig);
   }
 
+  mSendStreamConfig.rtp.rids.clear();
+  bool has_rid = false;
+  for (size_t idx = 0; idx < streamCount; idx++) {
+    auto& simulcastEncoding = mCurSendCodecConfig->mSimulcastEncodings[idx];
+    if (simulcastEncoding.rid[0]) {
+      has_rid = true;
+      break;
+    }
+  }
+  if (has_rid) {
+    for (size_t idx = streamCount; idx > 0; idx--) {
+      auto& simulcastEncoding = mCurSendCodecConfig->mSimulcastEncodings[idx-1];
+      mSendStreamConfig.rtp.rids.push_back(simulcastEncoding.rid);
+    }
+  }
+
   return condError;
 }
 
 bool
 WebrtcVideoConduit::SetRemoteSSRC(unsigned int ssrc)
 {
+  CSFLogDebug(logTag, "%s: SSRC %u (0x%x)", __FUNCTION__, ssrc, ssrc);
   mRecvStreamConfig.rtp.remote_ssrc = ssrc;
 
   unsigned int current_ssrc;
   if (!GetRemoteSSRC(&current_ssrc)) {
     return false;
   }
 
   if (current_ssrc == ssrc) {
@@ -871,21 +993,22 @@ bool WebrtcVideoConduit::GetRTCPReceiver
         "%s for VideoConduit:%p ssrc not found in SendStream stats.",
         __FUNCTION__, this);
       return false;
     }
     *jitterMs = ind->second.rtcp_stats.jitter;
     *cumulativeLost = ind->second.rtcp_stats.cumulative_lost;
     *bytesReceived = ind->second.rtp_stats.MediaPayloadBytes();
     *packetsReceived = ind->second.rtp_stats.transmitted.packets;
-    int64_t rtt = mSendStream->GetRtt();
+    auto stats = mCall->Call()->GetStats();
+    int64_t rtt = stats.rtt_ms;
 #ifdef DEBUG
     if (rtt > INT32_MAX) {
       CSFLogError(logTag,
-        "%s for VideoConduit:%p mRecvStream->GetRtt() is larger than the"
+        "%s for VideoConduit:%p RTT is larger than the"
         " maximum size of an RTCP RTT.", __FUNCTION__, this);
     }
 #endif
     if (rtt > 0) {
       *rttMs = rtt;
     } else {
       *rttMs = 0;
     }
@@ -966,24 +1089,34 @@ WebrtcVideoConduit::InitMain()
       // in automation
       if (!NS_WARN_IF(NS_FAILED(branch->GetIntPref(
             "media.peerconnection.video.min_bitrate_estimate", &temp))))
       {
         if (temp >= 0) {
           mMinBitrateEstimate = temp; // bps!
         }
       }
-      bool use_loadmanager = false;
-      if (!NS_WARN_IF(NS_FAILED(branch->GetBoolPref(
-            "media.navigator.load_adapt", &use_loadmanager))))
+      if (!NS_WARN_IF(NS_FAILED(branch->GetIntPref(
+            "media.peerconnection.video.svc.spatial", &temp))))
       {
-        if (use_loadmanager) {
-          mLoadManager = LoadManagerBuild();
-        }
+         if (temp >= 0) {
+            mSpatialLayers = temp;
+         }
       }
+      if (!NS_WARN_IF(NS_FAILED(branch->GetIntPref(
+            "media.peerconnection.video.svc.temporal", &temp))))
+      {
+         if (temp >= 0) {
+            mTemporalLayers = temp;
+         }
+      }
+      Unused << NS_WARN_IF(NS_FAILED(branch->GetBoolPref(
+        "media.peerconnection.video.denoising", &mDenoising)));
+      Unused << NS_WARN_IF(NS_FAILED(branch->GetBoolPref(
+        "media.peerconnection.video.lock_scaling", &mLockScaling)));
     }
   }
 #ifdef MOZ_WIDGET_ANDROID
   // get the JVM
   JavaVM *jvm = jsjni_GetVM();
 
   if (mozilla::camera::VideoEngine::SetAndroidObjects(jvm) != 0) {
     CSFLogError(logTag,  "%s: could not set Android objects", __FUNCTION__);
@@ -1186,35 +1319,35 @@ WebrtcVideoConduit::ConfigureRecvMediaCo
   // Now decide if we need to recreate the receive stream, or can keep it
   if (!mRecvStream ||
       CodecsDifferent(recv_codecs, mRecvCodecList) ||
       mRecvStreamConfig.rtp.nack.rtp_history_ms != (use_nack_basic ? 1000 : 0) ||
       mRecvStreamConfig.rtp.remb != use_remb ||
       mRecvStreamConfig.rtp.tmmbr != use_tmmbr ||
       mRecvStreamConfig.rtp.keyframe_method != kf_request_method ||
       (use_fec &&
-       (mRecvStreamConfig.rtp.fec.ulpfec_payload_type != ulpfec_payload_type ||
-        mRecvStreamConfig.rtp.fec.red_payload_type != red_payload_type))) {
+       (mRecvStreamConfig.rtp.ulpfec.ulpfec_payload_type != ulpfec_payload_type ||
+        mRecvStreamConfig.rtp.ulpfec.red_payload_type != red_payload_type))) {
 
     condError = StopReceiving();
     if (condError != kMediaConduitNoError) {
       return condError;
     }
 
     // If we fail after here things get ugly
     mRecvStreamConfig.rtp.rtcp_mode = webrtc::RtcpMode::kCompound;
     mRecvStreamConfig.rtp.nack.rtp_history_ms = use_nack_basic ? 1000 : 0;
     mRecvStreamConfig.rtp.remb = use_remb;
     mRecvStreamConfig.rtp.tmmbr = use_tmmbr;
     mRecvStreamConfig.rtp.keyframe_method = kf_request_method;
 
     if (use_fec) {
-      mRecvStreamConfig.rtp.fec.ulpfec_payload_type = ulpfec_payload_type;
-      mRecvStreamConfig.rtp.fec.red_payload_type = red_payload_type;
-      mRecvStreamConfig.rtp.fec.red_rtx_payload_type = -1;
+      mRecvStreamConfig.rtp.ulpfec.ulpfec_payload_type = ulpfec_payload_type;
+      mRecvStreamConfig.rtp.ulpfec.red_payload_type = red_payload_type;
+      mRecvStreamConfig.rtp.ulpfec.red_rtx_payload_type = -1;
     }
 
     // SetRemoteSSRC should have populated this already
     mRecvSSRC = mRecvStreamConfig.rtp.remote_ssrc;
 
     // XXX ugh! same SSRC==0 problem that webrtc.org has
     if (mRecvSSRC == 0) {
       // Handle un-signalled SSRCs by creating a random one and then when it actually gets set,
@@ -1273,118 +1406,139 @@ WebrtcVideoConduit::ConfigureRecvMediaCo
       }
     }
     return StartReceiving();
   }
   return kMediaConduitNoError;
 }
 
 webrtc::VideoDecoder*
-WebrtcVideoConduit::CreateDecoder(webrtc::VideoDecoder::DecoderType aType)
+WebrtcVideoConduit::CreateDecoder(webrtc::VideoCodecType aType)
 {
   webrtc::VideoDecoder* decoder = nullptr;
-
-  if (aType == webrtc::VideoDecoder::kH264) {
-    // get an external decoder
-#ifdef MOZ_WEBRTC_OMX
-    decoder = OMXVideoCodec::CreateDecoder(OMXVideoCodec::CodecType::CODEC_H264);
-#else
-    decoder = GmpVideoCodec::CreateDecoder();
-#endif
-    if (decoder) {
-      mRecvCodecPlugin = static_cast<WebrtcVideoDecoder*>(decoder);
-    }
 #ifdef MOZ_WEBRTC_MEDIACODEC
-  } else if (aType == webrtc::VideoDecoder::kVp8) {
-    bool enabled = false;
-    // attempt to get a decoder
-    enabled = mozilla::Preferences::GetBool(
-                  "media.navigator.hardware.vp8_decode.acceleration_enabled", false);
-    if (enabled) {
-      nsCOMPtr<nsIGfxInfo> gfxInfo = do_GetService("@mozilla.org/gfx/info;1");
-      if (gfxInfo) {
-        int32_t status;
-        nsCString discardFailureId;
+  bool enabled = false;
+#endif
 
-        if (NS_SUCCEEDED(gfxInfo->GetFeatureStatus(
+  switch (aType) {
+    case webrtc::VideoCodecType::kVideoCodecH264:
+      // get an external decoder
+#ifdef MOZ_WEBRTC_OMX
+      decoder = OMXVideoCodec::CreateDecoder(OMXVideoCodec::CodecType::CODEC_H264);
+#else
+      decoder = GmpVideoCodec::CreateDecoder();
+#endif
+      if (decoder) {
+        mRecvCodecPlugin = static_cast<WebrtcVideoDecoder*>(decoder);
+      }
+      break;
+
+    case webrtc::VideoCodecType::kVideoCodecVP8:
+#ifdef MOZ_WEBRTC_MEDIACODEC
+      // attempt to get a decoder
+      enabled = mozilla::Preferences::GetBool(
+        "media.navigator.hardware.vp8_decode.acceleration_enabled", false);
+      if (enabled) {
+        nsCOMPtr<nsIGfxInfo> gfxInfo = do_GetService("@mozilla.org/gfx/info;1");
+        if (gfxInfo) {
+          int32_t status;
+          nsCString discardFailureId;
+
+          if (NS_SUCCEEDED(gfxInfo->GetFeatureStatus(
                              nsIGfxInfo::FEATURE_WEBRTC_HW_ACCELERATION_DECODE,
                              discardFailureId, &status))) {
 
-          if (status != nsIGfxInfo::FEATURE_STATUS_OK) {
-            NS_WARNING("VP8 decoder hardware is not whitelisted: disabling.\n");
-          } else {
-            decoder = MediaCodecVideoCodec::CreateDecoder(
-                                        MediaCodecVideoCodec::CodecType::CODEC_VP8);
+            if (status != nsIGfxInfo::FEATURE_STATUS_OK) {
+              NS_WARNING("VP8 decoder hardware is not whitelisted: disabling.\n");
+            } else {
+              decoder = MediaCodecVideoCodec::CreateDecoder(
+                MediaCodecVideoCodec::CodecType::CODEC_VP8);
+            }
           }
         }
       }
-    }
+#endif
+      // Use a software VP8 decoder as a fallback.
+      if (!decoder) {
+        decoder = webrtc::VP8Decoder::Create();
+      }
+      break;
 
-    // Use a software VP8 decoder as a fallback.
-    if (!decoder) {
-      decoder = webrtc::VideoDecoder::Create(aType);
-    }
-#endif
-  } else {
-    decoder = webrtc::VideoDecoder::Create(aType);
+    case webrtc::VideoCodecType::kVideoCodecVP9:
+      MOZ_ASSERT(webrtc::VP9Decoder::IsSupported());
+      decoder = webrtc::VP9Decoder::Create();
+      break;
+
+    default:
+      break;
   }
 
   return decoder;
 }
 
 webrtc::VideoEncoder*
-WebrtcVideoConduit::CreateEncoder(webrtc::VideoEncoder::EncoderType aType,
+WebrtcVideoConduit::CreateEncoder(webrtc::VideoCodecType aType,
                                   bool enable_simulcast)
 {
   webrtc::VideoEncoder* encoder = nullptr;
-  if (aType == webrtc::VideoEncoder::kH264) {
-    // get an external encoder
+#ifdef MOZ_WEBRTC_MEDIACODEC
+  bool enabled = false;
+#endif
+
+  switch (aType) {
+    case webrtc::VideoCodecType::kVideoCodecH264:
+      // get an external encoder
 #ifdef MOZ_WEBRTC_OMX
-    encoder = OMXVideoCodec::CreateEncoder(OMXVideoCodec::CodecType::CODEC_H264);
+      encoder = OMXVideoCodec::CreateEncoder(OMXVideoCodec::CodecType::CODEC_H264);
 #else
-    encoder = GmpVideoCodec::CreateEncoder();
+      encoder = GmpVideoCodec::CreateEncoder();
 #endif
-    if (encoder) {
-      mSendCodecPlugin = static_cast<WebrtcVideoEncoder*>(encoder);
-    }
+      if (encoder) {
+        mSendCodecPlugin = static_cast<WebrtcVideoEncoder*>(encoder);
+      }
+      break;
+
+    case webrtc::VideoCodecType::kVideoCodecVP8:
 #ifdef MOZ_WEBRTC_MEDIACODEC
-  } else if (aType == webrtc::VideoEncoder::kVp8) {
-    bool enabled = false;
-    // attempt to get a encoder
-    enabled = mozilla::Preferences::GetBool(
-                  "media.navigator.hardware.vp8_encode.acceleration_enabled", false);
-    if (enabled) {
-      nsCOMPtr<nsIGfxInfo> gfxInfo = do_GetService("@mozilla.org/gfx/info;1");
-      if (gfxInfo) {
-        int32_t status;
-        nsCString discardFailureId;
+      // attempt to get a encoder
+      enabled = mozilla::Preferences::GetBool(
+        "media.navigator.hardware.vp8_encode.acceleration_enabled", false);
+      if (enabled) {
+        nsCOMPtr<nsIGfxInfo> gfxInfo = do_GetService("@mozilla.org/gfx/info;1");
+        if (gfxInfo) {
+          int32_t status;
+          nsCString discardFailureId;
 
-        if (NS_SUCCEEDED(gfxInfo->GetFeatureStatus(
-                         nsIGfxInfo::FEATURE_WEBRTC_HW_ACCELERATION_ENCODE,
-                         discardFailureId, &status))) {
+          if (NS_SUCCEEDED(gfxInfo->GetFeatureStatus(
+                           nsIGfxInfo::FEATURE_WEBRTC_HW_ACCELERATION_ENCODE,
+                           discardFailureId, &status))) {
 
-          if (status != nsIGfxInfo::FEATURE_STATUS_OK) {
-            NS_WARNING("VP8 encoder hardware is not whitelisted: disabling.\n");
-          } else {
-            encoder = MediaCodecVideoCodec::CreateEncoder(
-                                        MediaCodecVideoCodec::CodecType::CODEC_VP8);
+            if (status != nsIGfxInfo::FEATURE_STATUS_OK) {
+              NS_WARNING("VP8 encoder hardware is not whitelisted: disabling.\n");
+            } else {
+              encoder = MediaCodecVideoCodec::CreateEncoder(
+                                          MediaCodecVideoCodec::CodecType::CODEC_VP8);
+            }
           }
         }
       }
-    }
+#endif
+      // Use a software VP8 encoder as a fallback.
+      if (!encoder) {
+        encoder = webrtc::VP8Encoder::Create();
+      }
+      break;
 
-    // Use a software VP8 encoder as a fallback.
-    if (!encoder) {
-      encoder = webrtc::VideoEncoder::Create(aType, enable_simulcast);
-    }
-#endif
-  } else {
-    encoder = webrtc::VideoEncoder::Create(aType, enable_simulcast);
+    case webrtc::VideoCodecType::kVideoCodecVP9:
+      encoder = webrtc::VP9Encoder::Create();
+      break;
+
+    default:
+      break;
   }
-
   return encoder;
 }
 
 struct ResolutionAndBitrateLimits
 {
   int resolution_in_mb;
   int min_bitrate_bps;
   int start_bitrate_bps;
@@ -1467,56 +1621,16 @@ WebrtcVideoConduit::SelectBitrates(
   if (mStartBitrate && mStartBitrate > out_start) {
     out_start = mStartBitrate;
   }
   out_start = std::max(out_start, out_min);
 
   MOZ_ASSERT(mPrefMaxBitrate == 0 || out_max <= mPrefMaxBitrate);
 }
 
-template <class t>
-static void
-ConstrainPreservingAspectRatioExact(uint32_t max_fs, t* width, t* height)
-{
-  // We could try to pick a better starting divisor, but it won't make any real
-  // performance difference.
-  for (size_t d = 1; d < std::min(*width, *height); ++d) {
-    if ((*width % d) || (*height % d)) {
-      continue; // Not divisible
-    }
-
-    if (((*width) * (*height)) / (d * d) <= max_fs) {
-      *width /= d;
-      *height /= d;
-      return;
-    }
-  }
-
-  *width = 0;
-  *height = 0;
-}
-
-template <class t>
-static void
-ConstrainPreservingAspectRatio(uint16_t max_width, uint16_t max_height,
-                               t* width, t* height)
-{
-  if (((*width) <= max_width) && ((*height) <= max_height)) {
-    return;
-  }
-
-  if ((*width) * max_height > max_width * (*height)) {
-    (*height) = max_width * (*height) / (*width);
-    (*width) = max_width;
-  } else {
-    (*width) = max_height * (*width) / (*height);
-    (*height) = max_height;
-  }
-}
-
 // XXX we need to figure out how to feed back changes in preferred capture
 // resolution to the getUserMedia source.
 // Returns boolean if we've submitted an async change (and took ownership
 // of *frame's data)
 bool
 WebrtcVideoConduit::SelectSendResolution(unsigned short width,
                                          unsigned short height,
                                          webrtc::VideoFrame* frame) // may be null
@@ -1577,16 +1691,17 @@ WebrtcVideoConduit::SelectSendResolution
       max_height = 16 * std::min(mb_height, mb_max);
       ConstrainPreservingAspectRatio(max_width, max_height, &width, &height);
     }
   }
 
 
   // Adapt to getUserMedia resolution changes
   // check if we need to reconfigure the sending resolution.
+  // NOTE: mSendingWidth != mLastWidth, because of maxwidth/height/etc above
   bool changed = false;
   if (mSendingWidth != width || mSendingHeight != height) {
     CSFLogDebug(logTag, "%s: resolution changing to %ux%u (from %ux%u)",
                 __FUNCTION__, width, height, mSendingWidth, mSendingHeight);
     // This will avoid us continually retrying this operation if it fails.
     // If the resolution changes, we'll try again.  In the meantime, we'll
     // keep using the old size in the encoder.
     mSendingWidth = width;
@@ -1618,19 +1733,18 @@ WebrtcVideoConduit::SelectSendResolution
       // all frames while in the process of a reconfig and then encode the
       // frame that started the reconfig, which is close.  There may be
       // barely perceptible glitch in the video due to the dropped frame(s).
       mInReconfig = true;
 
       // We can't pass a UniquePtr<> or unique_ptr<> to a lambda directly
       webrtc::VideoFrame* new_frame = nullptr;
       if (frame) {
-        new_frame = new webrtc::VideoFrame();
         // the internal buffer pointer is refcounted, so we don't have 2 copies here
-        new_frame->ShallowCopy(*frame);
+        new_frame = new webrtc::VideoFrame(*frame);
       }
       RefPtr<WebrtcVideoConduit> self(this);
       RefPtr<Runnable> webrtc_runnable =
         media::NewRunnableFrom([self, width, height, new_frame]() -> nsresult {
             UniquePtr<webrtc::VideoFrame> local_frame(new_frame); // Simplify cleanup
 
             MutexAutoLock lock(self->mCodecMutex);
             return self->ReconfigureSendCodec(width, height, new_frame);
@@ -1652,80 +1766,24 @@ WebrtcVideoConduit::SelectSendResolution
 
 nsresult
 WebrtcVideoConduit::ReconfigureSendCodec(unsigned short width,
                                          unsigned short height,
                                          webrtc::VideoFrame* frame)
 {
   mCodecMutex.AssertCurrentThreadOwns();
 
-  if (!mEncoderConfig.StreamCount()) {
-    CSFLogError(logTag, "%s: No VideoStreams configured", __FUNCTION__);
-    return NS_ERROR_FAILURE;
-  }
-
-  mEncoderConfig.ForEachStream(
-    [&](webrtc::VideoStream& video_stream,
-        VideoEncoderConfigBuilder::SimulcastStreamConfig& simStream,
-        const size_t index)
-  {
-    mInReconfig = false;
-
-    CSFLogDebug(logTag,
-                "%s: Requesting resolution change to %ux%u (from %ux%u), jsScaleDownBy=%f",
-                __FUNCTION__, width, height, static_cast<unsigned int>(video_stream.width),
-                static_cast<unsigned int>(video_stream.height), simStream.jsScaleDownBy);
-
-    MOZ_ASSERT(simStream.jsScaleDownBy >= 1.0);
-    uint32_t new_width = (width / simStream.jsScaleDownBy);
-    uint32_t new_height = (height / simStream.jsScaleDownBy);
-    video_stream.width = width;
-    video_stream.height = height;
-    // XXX this should depend on the final values (below) of video_stream.width/height, not
-    // the current value calculated on the incoming framesize (largest simulcast layer)
-    video_stream.max_framerate = mSendingFramerate;
-    SelectBitrates(video_stream.width, video_stream.height,
-                   // XXX formerly was MinIgnoreZero(mNegotiatedMaxBitrate, simStream.jsMaxBitrate),
-                   simStream.jsMaxBitrate,
-                   mLastFramerateTenths, video_stream);
-    CSFLogVerbose(logTag, "%s: new_width=%" PRIu32 " new_height=%" PRIu32,
-                  __FUNCTION__, new_width, new_height);
-    if (new_width != video_stream.width || new_height != video_stream.height) {
-      if (mEncoderConfig.StreamCount() == 1) {
-        CSFLogVerbose(logTag, "%s: ConstrainPreservingAspectRatio", __FUNCTION__);
-        // Use less strict scaling in unicast. That way 320x240 / 3 = 106x79.
-        ConstrainPreservingAspectRatio(new_width, new_height,
-                                       &video_stream.width, &video_stream.height);
-      } else {
-        CSFLogVerbose(logTag, "%s: ConstrainPreservingAspectRatioExact", __FUNCTION__);
-        // webrtc.org supposedly won't tolerate simulcast unless every stream
-        // is exactly the same aspect ratio. 320x240 / 3 = 80x60.
-        ConstrainPreservingAspectRatioExact(new_width * new_height,
-                                            &video_stream.width, &video_stream.height);
-      }
-    }
-
-    CSFLogDebug(
-      logTag, "%s: Encoder resolution changed to %ux%u @ %ufps, bitrate %u:%u",
-      __FUNCTION__, static_cast<unsigned int>(video_stream.width),
-      static_cast<unsigned int>(video_stream.height), mSendingFramerate,
-      video_stream.min_bitrate_bps, video_stream.max_bitrate_bps);
-  });
   // Test in case the stream hasn't started yet!  We could get a frame in
   // before we get around to StartTransmitting(), and that would dispatch a
   // runnable to call this.
+  mInReconfig = false;
   if (mSendStream) {
-    if (!mSendStream->ReconfigureVideoEncoder(mEncoderConfig.GenerateConfig())) {
-      CSFLogError(logTag, "%s: ReconfigureVideoEncoder failed", __FUNCTION__);
-      return NS_ERROR_FAILURE;
-    }
-
+    mSendStream->ReconfigureVideoEncoder(mEncoderConfig.CopyConfig());
     if (frame) {
-      // XXX I really don't like doing this from MainThread...
-      mSendStream->Input()->IncomingCapturedFrame(*frame);
+      mVideoBroadcaster.OnFrame(*frame);
       CSFLogDebug(logTag, "%s Inserted a frame from reconfig lambda", __FUNCTION__);
     }
   }
   return NS_OK;
 }
 
 unsigned int
 WebrtcVideoConduit::SelectSendFrameRate(const VideoCodecConfig* codecConfig,
@@ -1772,28 +1830,90 @@ WebrtcVideoConduit::SendVideoFrame(unsig
 
   // Transmission should be enabled before we insert any frames.
   if (!mEngineTransmitting) {
     CSFLogError(logTag, "%s Engine not transmitting ", __FUNCTION__);
     return kMediaConduitSessionNotInited;
   }
 
   // insert the frame to video engine in I420 format only
-  webrtc::VideoFrame video_frame;
-  video_frame.CreateFrame(video_buffer, width, height, webrtc::kVideoRotation_0);
-  video_frame.set_timestamp(capture_time);
-  video_frame.set_render_time_ms(capture_time);
+  const int stride_y = width;
+  const int stride_uv = (width + 1) / 2;
+
+  const uint8_t* buffer_y = video_buffer;
+  const uint8_t* buffer_u = buffer_y + stride_y * height;
+  const uint8_t* buffer_v = buffer_u + stride_uv * ((height + 1) / 2);
+  rtc::Callback0<void> callback_unused;
+  rtc::scoped_refptr<webrtc::WrappedI420Buffer> video_frame_buffer(
+    new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
+      width, height,
+      buffer_y, stride_y,
+      buffer_u, stride_uv,
+      buffer_v, stride_uv,
+      callback_unused));
+
+  webrtc::VideoFrame video_frame(video_frame_buffer, capture_time,
+                                 capture_time, webrtc::kVideoRotation_0); // XXX
 
   return SendVideoFrame(video_frame);
 }
 
+void
+WebrtcVideoConduit::AddOrUpdateSink(
+  rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
+  const rtc::VideoSinkWants& wants)
+{
+  CSFLogDebug(logTag, "%s (send SSRC %u (0x%x)) - wants pixels = %d/%d", __FUNCTION__,
+              mSendStreamConfig.rtp.ssrcs.front(), mSendStreamConfig.rtp.ssrcs.front(),
+              wants.max_pixel_count ? *wants.max_pixel_count : -1,
+              wants.max_pixel_count_step_up ? *wants.max_pixel_count_step_up : -1);
+
+  // MUST run on the same thread as first call (MainThread)
+  if (!NS_IsMainThread()) {
+    // This can be asynchronous
+    RefPtr<WebrtcVideoConduit> self(this);
+    NS_DispatchToMainThread(media::NewRunnableFrom([self, sink, wants]() {
+          self->mVideoBroadcaster.AddOrUpdateSink(sink, wants);
+          self->OnSinkWantsChanged(self->mVideoBroadcaster.wants());
+          return NS_OK;
+        }));
+  } else {
+    mVideoBroadcaster.AddOrUpdateSink(sink, wants);
+    OnSinkWantsChanged(mVideoBroadcaster.wants());
+  }
+}
+
+void
+WebrtcVideoConduit::RemoveSink(
+  rtc::VideoSinkInterface<webrtc::VideoFrame>* sink)
+{
+  mVideoBroadcaster.RemoveSink(sink);
+  OnSinkWantsChanged(mVideoBroadcaster.wants());
+}
+
+void
+WebrtcVideoConduit::OnSinkWantsChanged(
+  const rtc::VideoSinkWants& wants) {
+  NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
+  if (!mLockScaling) {
+    mVideoAdapter.OnResolutionRequest(wants.max_pixel_count,
+                                      wants.max_pixel_count_step_up);
+  }
+}
+
 MediaConduitErrorCode
 WebrtcVideoConduit::SendVideoFrame(webrtc::VideoFrame& frame)
 {
-  CSFLogDebug(logTag, "%s", __FUNCTION__);
+  // XXX Google uses a "timestamp_aligner" to translate timestamps from the
+  // camera via TranslateTimestamp(); we should look at doing the same.  This
+  // avoids sampling error when capturing frames, but google had to deal with some
+  // broken cameras, including Logitech C920s, IIRC.
+
+  CSFLogVerbose(logTag, "%s (send SSRC %u (0x%x))", __FUNCTION__,
+              mSendStreamConfig.rtp.ssrcs.front(), mSendStreamConfig.rtp.ssrcs.front());
   // See if we need to recalculate what we're sending.
   // Don't compute mSendingWidth/Height, since those may not be the same as the input.
   {
     MutexAutoLock lock(mCodecMutex);
     if (mInReconfig) {
       // Waiting for it to finish
       return kMediaConduitNoError;
     }
@@ -1801,24 +1921,88 @@ WebrtcVideoConduit::SendVideoFrame(webrt
       CSFLogVerbose(logTag, "%s: call SelectSendResolution with %ux%u",
                     __FUNCTION__, frame.width(), frame.height());
       if (SelectSendResolution(frame.width(), frame.height(), &frame)) {
         // SelectSendResolution took ownership of the data in i420_frame.
         // Submit the frame after reconfig is done
         return kMediaConduitNoError;
       }
     }
+    // Adapt the input frame to the aggregated wants of the attached sinks.
+    if (!mVideoBroadcaster.frame_wanted()) {
+      return kMediaConduitNoError;
+    }
 
-    if (mSendStream) { // can happen before StartTransmitting()
-      mSendStream->Input()->IncomingCapturedFrame(frame);
+    int adapted_width;
+    int adapted_height;
+    int crop_width;
+    int crop_height;
+    int crop_x;
+    int crop_y;
+    if (!mVideoAdapter.AdaptFrameResolution(
+          frame.width(), frame.height(),
+          frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec,
+          &crop_width, &crop_height, &adapted_width, &adapted_height)) {
+      // VideoAdapter dropped the frame.
+      return kMediaConduitNoError;
     }
+    crop_x = (frame.width() - crop_width) / 2;
+    crop_y = (frame.height() - crop_height) / 2;
+
+    rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer;
+    if (adapted_width == frame.width() && adapted_height == frame.height()) {
+      // No adaptation needed - pass the source buffer through unchanged.
+      buffer = frame.video_frame_buffer();
+      // XXX Bug 1367651 - Use nativehandles where possible instead of software scaling
+#ifdef WEBRTC_MAC
+#if defined(MAC_OS_X_VERSION_10_8) && \
+  (MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_8)
+      // XXX not available in Mac 10.7 SDK
+      // code adapted from objvideotracksource.mm
+    } else if (frame.nativeHandle) {
+      // Adapted CVPixelBuffer frame.
+      buffer = new rtc::RefCountedObject<CoreVideoFrameBuffer>(
+        static_cast<CVPixelBufferRef>(frame.nativeHandle), adapted_width, adapted_height,
+        crop_width, crop_height, crop_x, crop_y);
+#endif
+#elif WEBRTC_WIN
+      // XXX Bug 1367651 - add a native-handle fast path for Windows
+#elif WEBRTC_LINUX
+      // XXX Bug 1367651 - add a native-handle fast path for Linux
+#elif WEBRTC_ANDROID
+      // XXX Bug 1367651 - add a native-handle fast path for Android
+#endif
+    } else {
+      // Adapted I420 frame.
+      // TODO(magjed): Optimize this I420 path.
+      rtc::scoped_refptr<webrtc::I420Buffer> i420_buffer =
+        webrtc::I420Buffer::Create(adapted_width, adapted_height);
+      i420_buffer->CropAndScaleFrom(*frame.video_frame_buffer(), crop_x, crop_y, crop_width, crop_height);
+      buffer = i420_buffer;
+    }
+
+#if 0
+    // Applying rotation is only supported for legacy reasons and performance is
+    // not critical here.
+    // XXX We're rotating at capture time; if we want to change that we'll need to
+    // rotate at input to any sink that can't handle rotated frames internally. We
+    // probably wouldn't need to rotate here unless the CVO extension wasn't agreed to.
+    // That state (CVO) would feed apply_rotation()
+    webrtc::VideoRotation rotation = static_cast<webrtc::VideoRotation>(frame.rotation);
+    if (apply_rotation() && rotation != kVideoRotation_0) {
+      buffer = I420Buffer::Rotate(*buffer->NativeToI420Buffer(), rotation);
+      rotation = kVideoRotation_0;
+    }
+#endif
+
+    mVideoBroadcaster.OnFrame(webrtc::VideoFrame(buffer, webrtc::kVideoRotation_0,
+                                                 /*rotation, translated_*/ frame.timestamp_us()));
   }
 
   mSendStreamStats.FrameDeliveredToEncoder();
-  CSFLogDebug(logTag, "%s Inserted a frame", __FUNCTION__);
   return kMediaConduitNoError;
 }
 
 // Transport Layer Callbacks
 
 MediaConduitErrorCode
 WebrtcVideoConduit::DeliverPacket(const void* data, int len)
 {
@@ -1840,29 +2024,47 @@ WebrtcVideoConduit::DeliverPacket(const 
   }
 
   return kMediaConduitNoError;
 }
 
 MediaConduitErrorCode
 WebrtcVideoConduit::ReceivedRTPPacket(const void* data, int len, uint32_t ssrc)
 {
+  // Handle an unknown SSRC (and the ssrc-not-signaled case).
+  // We can't just do this here; it has to happen on MainThread :-(
+  // We also don't want to drop the packet, nor stall this thread, so we hold
+  // the packet (and any following) for inserting once the SSRC is set.
   bool queue = mRecvSSRCSetInProgress;
-  if (mRecvSSRC != ssrc && !queue) {
+  if (queue || mRecvSSRC != ssrc) {
+    // Capture the packet for insertion after the SSRC is set -- do this
+    // before sending the runnable, since it may pull from the queue.  Since
+    // it dispatches back to us, it's less critical to do this here, but it
+    // doesn't hurt.
+    UniquePtr<QueuedPacket> packet((QueuedPacket*) malloc(sizeof(QueuedPacket) + len-1));
+    packet->mLen = len;
+    memcpy(packet->mData, data, len);
+    CSFLogDebug(logTag, "queuing packet: seq# %u, Len %d ",
+                (uint16_t)ntohs(((uint16_t*) packet->mData)[1]), packet->mLen);
+    if (queue) {
+      mQueuedPackets.AppendElement(Move(packet));
+      return kMediaConduitNoError;
+    }
+    // A new SSRC switch is needed.  Any packets already queued are from a
+    // previous switch that hasn't completed yet; drop them and only
+    // process packets for the latest SSRC.
+    mQueuedPackets.Clear();
+    mQueuedPackets.AppendElement(Move(packet));
+
+    CSFLogDebug(logTag, "%s: switching from SSRC %u to %u", __FUNCTION__,
+                mRecvSSRC, ssrc);
     // we "switch" here immediately, but buffer until the queue is released
     mRecvSSRC = ssrc;
    mRecvSSRCSetInProgress = true;
     queue = true;
-    // any queued packets are from a previous switch that hasn't completed