Bug 1588346: removed webrtc sdk directory. r=dminor draft
authorNils Ohlmeier [:drno] <drno@ohlmeier.org>
Sun, 13 Oct 2019 04:17:50 +0000
changeset 2378213 04f86827e66408ce1b1364f8179087d5208cea32
parent 2377671 c5e6477c3a245a97d4c3cdd5d3e406f8abaf94ad
child 2378214 6d98c78351cf1442d2a2d2d22e774dbdb3a09cc6
push id: 434593
push user: reviewbot
push date: Sun, 13 Oct 2019 04:18:09 +0000
treeherder: try@6d98c78351cf [default view] [failures only]
reviewers: dminor
bugs: 1588346
milestone: 71.0a1
Bug 1588346: removed webrtc sdk directory. r=dminor Differential Diff: PHID-DIFF-yh7ndskd3zunxheyjirs
media/webrtc/trunk/webrtc/sdk/BUILD.gn
media/webrtc/trunk/webrtc/sdk/OWNERS
media/webrtc/trunk/webrtc/sdk/android/AndroidManifest.xml
media/webrtc/trunk/webrtc/sdk/android/BUILD.gn
media/webrtc/trunk/webrtc/sdk/android/OWNERS
media/webrtc/trunk/webrtc/sdk/android/PRESUBMIT.py
media/webrtc/trunk/webrtc/sdk/android/README
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/AudioProcessingFactory.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/AudioSource.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/AudioTrack.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/CallSessionFileRotatingLogSink.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/Camera1Capturer.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/Camera1Enumerator.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/Camera2Capturer.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/Camera2Enumerator.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/CameraEnumerationAndroid.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/CameraEnumerator.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/CameraVideoCapturer.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/DataChannel.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/DefaultAudioProcessingFactory.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/DefaultVideoDecoderFactory.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/DefaultVideoEncoderFactory.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/DtmfSender.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/EglBase.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/EglRenderer.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/EncodedImage.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/FileVideoCapturer.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/GlRectDrawer.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/GlShader.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/GlTextureFrameBuffer.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/GlUtil.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/IceCandidate.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/JavaI420Buffer.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/MediaCodecVideoDecoder.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/MediaConstraints.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/MediaSource.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/MediaStream.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/MediaStreamTrack.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/Metrics.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/NativeLibraryLoader.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/NetworkMonitor.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/NetworkMonitorAutoDetect.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/OWNERS
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/PeerConnection.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/PeerConnectionFactory.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/PostProcessingFactory.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/RTCStats.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/RTCStatsCollectorCallback.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/RTCStatsReport.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/RendererCommon.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/RtpParameters.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/RtpReceiver.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/RtpSender.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/SdpObserver.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/SessionDescription.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/SoftwareVideoDecoderFactory.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/SoftwareVideoEncoderFactory.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/StatsObserver.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/StatsReport.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/SurfaceEglRenderer.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/SurfaceViewRenderer.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/TurnCustomizer.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/VideoCapturer.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/VideoCodecInfo.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/VideoCodecStatus.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/VideoDecoder.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/VideoDecoderFactory.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/VideoDecoderFallback.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/VideoEncoder.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/VideoEncoderFactory.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/VideoEncoderFallback.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/VideoFileRenderer.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/VideoFrame.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/VideoFrameDrawer.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/VideoRenderer.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/VideoSink.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/VideoSource.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/VideoTrack.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/YuvConverter.java
media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/YuvHelper.java
media/webrtc/trunk/webrtc/sdk/android/instrumentationtests/AndroidManifest.xml
media/webrtc/trunk/webrtc/sdk/android/instrumentationtests/ant.properties
media/webrtc/trunk/webrtc/sdk/android/instrumentationtests/build.xml
media/webrtc/trunk/webrtc/sdk/android/instrumentationtests/project.properties
media/webrtc/trunk/webrtc/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingByteBufferTest.java
media/webrtc/trunk/webrtc/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingTextureTest.java
media/webrtc/trunk/webrtc/sdk/android/instrumentationtests/src/org/webrtc/Camera2CapturerTest.java
media/webrtc/trunk/webrtc/sdk/android/instrumentationtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java
media/webrtc/trunk/webrtc/sdk/android/instrumentationtests/src/org/webrtc/DefaultAudioProcessingFactoryTest.java
media/webrtc/trunk/webrtc/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java
media/webrtc/trunk/webrtc/sdk/android/instrumentationtests/src/org/webrtc/EglRendererTest.java
media/webrtc/trunk/webrtc/sdk/android/instrumentationtests/src/org/webrtc/FileVideoCapturerTest.java
media/webrtc/trunk/webrtc/sdk/android/instrumentationtests/src/org/webrtc/GlRectDrawerTest.java
media/webrtc/trunk/webrtc/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoDecoderTest.java
media/webrtc/trunk/webrtc/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java
media/webrtc/trunk/webrtc/sdk/android/instrumentationtests/src/org/webrtc/MediaCodecVideoEncoderTest.java
media/webrtc/trunk/webrtc/sdk/android/instrumentationtests/src/org/webrtc/NetworkMonitorTest.java
media/webrtc/trunk/webrtc/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionFactoryTest.java
media/webrtc/trunk/webrtc/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionTest.java
media/webrtc/trunk/webrtc/sdk/android/instrumentationtests/src/org/webrtc/RendererCommonTest.java
media/webrtc/trunk/webrtc/sdk/android/instrumentationtests/src/org/webrtc/SurfaceTextureHelperTest.java
media/webrtc/trunk/webrtc/sdk/android/instrumentationtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java
media/webrtc/trunk/webrtc/sdk/android/instrumentationtests/src/org/webrtc/VideoFileRendererTest.java
media/webrtc/trunk/webrtc/sdk/android/instrumentationtests/src/org/webrtc/WebRtcJniBootTest.java
media/webrtc/trunk/webrtc/sdk/android/instrumentationtests/src/org/webrtc/YuvHelperTest.java
media/webrtc/trunk/webrtc/sdk/android/instrumentationtests/src/org/webrtc/capturetestvideo.y4m
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/AndroidVideoTrackSourceObserver.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/BaseBitrateAdjuster.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/BitrateAdjuster.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/CalledByNative.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/Camera1Session.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/Camera2Session.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/CameraCapturer.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/CameraSession.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/DynamicBitrateAdjuster.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/EglBase10.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/EglBase14.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/FramerateBitrateAdjuster.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/HardwareVideoDecoder.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/Histogram.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/JniCommon.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/MediaCodecUtils.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/NV12Buffer.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/NV21Buffer.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/NativeClassQualifiedName.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/NativeLibrary.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/TextureBufferImpl.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/VP8Decoder.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/VP8Encoder.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/VP9Decoder.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/VP9Encoder.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/VideoCodecType.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/VideoDecoderWrapper.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/VideoEncoderWrapper.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/WebRtcClassLoader.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/WrappedNativeI420Buffer.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/WrappedNativeVideoDecoder.java
media/webrtc/trunk/webrtc/sdk/android/src/java/org/webrtc/WrappedNativeVideoEncoder.java
media/webrtc/trunk/webrtc/sdk/android/src/jni/DEPS
media/webrtc/trunk/webrtc/sdk/android/src/jni/OWNERS
media/webrtc/trunk/webrtc/sdk/android/src/jni/androidhistogram_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/androidmediacodeccommon.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/androidmediadecoder_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/androidmediadecoder_jni.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/androidmediaencoder_jni.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/androidmetrics_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/androidnetworkmonitor_jni.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/androidvideotracksource.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/androidvideotracksource.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/androidvideotracksource_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/class_loader.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/class_loader.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/classreferenceholder.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/classreferenceholder.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/encodedimage.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/encodedimage.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/hardwarevideoencoderfactory.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/jni_common.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/jni_generator_helper.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/jni_generator_helper.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/jni_helpers.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/jni_helpers.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/jni_onload.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/nv12buffer_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/nv21buffer_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/OWNERS
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/androidnetworkmonitor_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/androidnetworkmonitor_jni.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/audio_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/audio_jni.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/audiotrack_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/callsessionfilerotatinglogsink_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/datachannel.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/datachannel.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/defaultaudioprocessingfactory_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/dtmfsender_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/java_native_conversion.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/java_native_conversion.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/logging_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/media_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/media_jni.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/mediaconstraints_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/mediaconstraints_jni.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/mediasource_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/mediastream_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/mediastreamtrack_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/null_audio_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/null_media_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/null_video_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/ownedfactoryandthreads.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/ownedfactoryandthreads.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/peerconnection_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/peerconnectionfactory_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/peerconnectionobserver_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/peerconnectionobserver_jni.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/rtcstatscollectorcallbackwrapper.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/rtcstatscollectorcallbackwrapper.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/rtpreceiver_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/rtpreceiverobserver_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/rtpreceiverobserver_jni.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/rtpsender_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/sdpobserver_jni.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/statsobserver_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/statsobserver_jni.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/turncustomizer_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/video_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/pc/video_jni.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/surfacetexturehelper_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/surfacetexturehelper_jni.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/video_renderer_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/videocodecinfo.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/videocodecinfo.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/videocodecstatus.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/videocodecstatus.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/videodecoderfactorywrapper.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/videodecoderfactorywrapper.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/videodecoderfallback.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/videodecoderwrapper.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/videodecoderwrapper.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/videoencoderfactorywrapper.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/videoencoderfactorywrapper.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/videoencoderfallback.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/videoencoderwrapper.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/videoencoderwrapper.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/videofilerenderer_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/videoframe.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/videoframe.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/videotrack_jni.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/vp8codec.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/vp9codec.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/wrapped_native_i420_buffer.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/wrapped_native_i420_buffer.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/wrappednativecodec.cc
media/webrtc/trunk/webrtc/sdk/android/src/jni/wrappednativecodec.h
media/webrtc/trunk/webrtc/sdk/android/src/jni/yuvhelper.cc
media/webrtc/trunk/webrtc/sdk/android/tests/src/org/webrtc/CameraEnumerationTest.java
media/webrtc/trunk/webrtc/sdk/objc/DEPS
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Audio/RTCAudioSession+Configuration.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Audio/RTCAudioSession+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Audio/RTCAudioSession.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Audio/RTCAudioSessionConfiguration.m
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Common/NSString+StdString.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Common/NSString+StdString.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Common/RTCDispatcher+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Common/RTCDispatcher.m
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Common/RTCFieldTrials.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Common/RTCFileLogger.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Common/RTCLogging.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Common/RTCUIApplicationStatusObserver.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Common/RTCUIApplicationStatusObserver.m
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Common/UIDevice+RTCDevice.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Common/helpers.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Common/helpers.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Common/scoped_cftyperef.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Metal/RTCMTLNSVideoView.m
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Metal/RTCMTLNV12Renderer.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Metal/RTCMTLNV12Renderer.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Metal/RTCMTLVideoView.m
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/OWNERS
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCAVFoundationVideoSource+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCAVFoundationVideoSource.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCAudioSource+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCAudioSource.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCAudioTrack+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCAudioTrack.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.m
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration+Native.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCDataChannel+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCDataChannel.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCDataChannelConfiguration+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCDataChannelConfiguration.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCDefaultVideoDecoderFactory.m
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCDefaultVideoEncoderFactory.m
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCEncodedImage.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCFileVideoCapturer.m
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCIceCandidate+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCIceCandidate.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCIceServer+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCIceServer.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCIntervalRange+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCIntervalRange.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCLegacyStatsReport+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCLegacyStatsReport.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCMediaConstraints+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCMediaConstraints.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCMediaSource+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCMediaSource.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCMediaStream+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCMediaStream.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCMediaStreamTrack+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCMediaStreamTrack.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCMetrics.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCMetricsSampleInfo+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCMetricsSampleInfo.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection+DataChannel.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection+Native.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection+Stats.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory+Native.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCRtpCodecParameters+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCRtpCodecParameters.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCRtpEncodingParameters+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCRtpEncodingParameters.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCRtpFragmentationHeader.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCRtpParameters+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCRtpParameters.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCRtpReceiver+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCRtpReceiver.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCRtpSender+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCRtpSender.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCSSLAdapter.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCSessionDescription+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCSessionDescription.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCTracing.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCapturer.m
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodec+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodec.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodecH264.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodecVP8.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodecVP9.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoEncoderSettings.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoFrame+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoFrame.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoRendererAdapter+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoRendererAdapter.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoRendererAdapter.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoSource+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoSource.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoTrack+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoTrack.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoDecoder.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoDecoder.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoEncoder.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoEncoder.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/UI/RTCCameraPreviewView.m
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/UI/RTCEAGLVideoView.m
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/UI/RTCNSGLVideoView.m
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Video/AVCaptureSession+DevicePosition.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Video/AVCaptureSession+DevicePosition.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Video/RTCAVFoundationVideoCapturerInternal.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Video/RTCAVFoundationVideoCapturerInternal.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Video/RTCCVPixelBuffer.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Video/RTCDefaultShader.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Video/RTCDefaultShader.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Video/RTCI420Buffer+Private.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Video/RTCI420Buffer.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Video/RTCI420TextureCache.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Video/RTCI420TextureCache.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Video/RTCNV12TextureCache.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Video/RTCNV12TextureCache.m
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Video/RTCOpenGLDefines.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Video/RTCShader.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Video/RTCShader.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Video/avfoundationformatmapper.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Video/avfoundationformatmapper.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Video/avfoundationvideocapturer.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Video/avfoundationvideocapturer.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Video/corevideo_frame_buffer.cc
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Video/corevideo_frame_buffer.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Video/objc_frame_buffer.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Video/objc_frame_buffer.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Video/objcvideotracksource.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/Video/objcvideotracksource.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoDecoderH264.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/VideoToolbox/helpers.cc
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/VideoToolbox/helpers.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.cc
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter_unittest.cc
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/VideoToolbox/objc_video_decoder_factory.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/VideoToolbox/objc_video_decoder_factory.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/VideoToolbox/objc_video_encoder_factory.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Classes/VideoToolbox/objc_video_encoder_factory.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/OWNERS
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCAVFoundationVideoSource.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCAudioSession.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCAudioSessionConfiguration.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCAudioSource.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCAudioTrack.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCCameraPreviewView.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCCameraVideoCapturer.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCConfiguration.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCDataChannel.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCDataChannelConfiguration.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCDispatcher.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCEAGLVideoView.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCFieldTrials.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCFileLogger.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCFileVideoCapturer.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCIceCandidate.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCIceServer.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCIntervalRange.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCLegacyStatsReport.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCLogging.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCMTLNSVideoView.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCMTLVideoView.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCMacros.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCMediaConstraints.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCMediaSource.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCMediaStream.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCMediaStreamTrack.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCMetrics.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCMetricsSampleInfo.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCNSGLVideoView.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCPeerConnection.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCPeerConnectionFactory.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCRtpCodecParameters.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCRtpEncodingParameters.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCRtpParameters.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCRtpReceiver.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCRtpSender.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCSSLAdapter.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCSessionDescription.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCTracing.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoCapturer.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoCodec.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoCodecFactory.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoCodecH264.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoDecoderVP8.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoDecoderVP9.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoEncoderVP8.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoEncoderVP9.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrameBuffer.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoRenderer.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoSource.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoTrack.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoViewShading.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/UIDevice+RTCDevice.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Headers/WebRTC/WebRTC.h
media/webrtc/trunk/webrtc/sdk/objc/Framework/Info.plist
media/webrtc/trunk/webrtc/sdk/objc/Framework/Modules/module.modulemap
media/webrtc/trunk/webrtc/sdk/objc/Framework/UnitTests/RTCAudioSessionTest.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/UnitTests/RTCCameraVideoCapturerTests.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/UnitTests/RTCConfigurationTest.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/UnitTests/RTCDataChannelConfigurationTest.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/UnitTests/RTCFileVideoCapturer_xctest.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/UnitTests/RTCIceCandidateTest.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/UnitTests/RTCIceServerTest.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/UnitTests/RTCIntervalRangeTests.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/UnitTests/RTCMTLVideoView_xctest.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/UnitTests/RTCMediaConstraintsTest.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/UnitTests/RTCPeerConnectionTest.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/UnitTests/RTCSessionDescriptionTest.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/UnitTests/RTCTracingTest.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/UnitTests/avformatmappertests.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/UnitTests/foreman.mp4
media/webrtc/trunk/webrtc/sdk/objc/Framework/UnitTests/main.m
media/webrtc/trunk/webrtc/sdk/objc/Framework/UnitTests/objc_video_decoder_factory_tests.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/UnitTests/objc_video_encoder_factory_tests.mm
media/webrtc/trunk/webrtc/sdk/objc/Framework/UnitTests/scoped_cftyperef_tests.mm
media/webrtc/trunk/webrtc/sdk/objc/OWNERS
media/webrtc/trunk/webrtc/sdk/objc/WebRTC.podspec
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/BUILD.gn
+++ /dev/null
@@ -1,861 +0,0 @@
-# Copyright 2016 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS.  All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-import("../webrtc.gni")
-if (is_ios) {
-  import("//build/config/ios/rules.gni")
-  import("//build/config/ios/ios_sdk.gni")
-}
-
-declare_args() {
-  # Determine whether or not to include metal rendering
-  rtc_use_metal_rendering = is_mac || (is_ios && current_cpu == "arm64")
-}
-
-group("sdk") {
-  public_deps = []
-  if (!build_with_chromium) {
-    if (is_android) {
-      public_deps += [ "android" ]
-    }
-    if (is_ios) {
-      public_deps += [ ":framework_objc" ]
-    }
-  }
-}
-
-if (is_ios || is_mac) {
-  config("common_config_objc") {
-    include_dirs = [
-      "objc/Framework/Classes",
-      "objc/Framework/Classes/Audio",
-      "objc/Framework/Classes/Common",
-      "objc/Framework/Classes/Metal",
-      "objc/Framework/Classes/PeerConnection",
-      "objc/Framework/Classes/UI",
-      "objc/Framework/Classes/Video",
-      "objc/Framework/Classes/VideoToolbox",
-      "objc/Framework/Headers",
-    ]
-    cflags = [
-      "-Wstrict-overflow",
-      "-Wmissing-field-initializers",
-    ]
-  }
-
-  rtc_static_library("common_objc") {
-    sources = [
-      "objc/Framework/Classes/Common/NSString+StdString.h",
-      "objc/Framework/Classes/Common/NSString+StdString.mm",
-      "objc/Framework/Classes/Common/RTCDispatcher+Private.h",
-      "objc/Framework/Classes/Common/RTCDispatcher.m",
-      "objc/Framework/Classes/Common/RTCFieldTrials.mm",
-      "objc/Framework/Classes/Common/RTCLogging.mm",
-      "objc/Framework/Classes/Common/RTCUIApplicationStatusObserver.h",
-      "objc/Framework/Classes/Common/RTCUIApplicationStatusObserver.m",
-      "objc/Framework/Classes/Common/helpers.h",
-      "objc/Framework/Classes/Common/helpers.mm",
-      "objc/Framework/Classes/Common/scoped_cftyperef.h",
-      "objc/Framework/Headers/WebRTC/RTCDispatcher.h",
-      "objc/Framework/Headers/WebRTC/RTCFieldTrials.h",
-      "objc/Framework/Headers/WebRTC/RTCLogging.h",
-      "objc/Framework/Headers/WebRTC/RTCMacros.h",
-    ]
-
-    deps = [
-      "../rtc_base:rtc_base",
-    ]
-    configs += [ "..:common_objc" ]
-
-    public_configs = [ ":common_config_objc" ]
-
-    if (is_ios) {
-      sources += [
-        "objc/Framework/Classes/Common/UIDevice+RTCDevice.mm",
-        "objc/Framework/Headers/WebRTC/UIDevice+RTCDevice.h",
-      ]
-    }
-
-    if (!build_with_chromium) {
-      sources += [
-        "objc/Framework/Classes/Common/RTCFileLogger.mm",
-        "objc/Framework/Headers/WebRTC/RTCFileLogger.h",
-      ]
-    }
-  }
-
-  if (!build_with_chromium) {
-    rtc_static_library("audio_objc") {
-      sources = [
-        "objc/Framework/Classes/Audio/RTCAudioSession+Configuration.mm",
-        "objc/Framework/Classes/Audio/RTCAudioSession+Private.h",
-        "objc/Framework/Classes/Audio/RTCAudioSession.mm",
-        "objc/Framework/Classes/Audio/RTCAudioSessionConfiguration.m",
-        "objc/Framework/Headers/WebRTC/RTCAudioSession.h",
-        "objc/Framework/Headers/WebRTC/RTCAudioSessionConfiguration.h",
-      ]
-      configs += [ "..:common_objc" ]
-
-      public_configs = [ ":common_config_objc" ]
-
-      deps = [
-        ":common_objc",
-        "../rtc_base:rtc_base_approved",
-      ]
-
-      if (is_clang) {
-        # Suppress warnings from the Chromium Clang plugin
-        # (bugs.webrtc.org/163).
-        suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
-      }
-    }
-
-    # Keep videotracksource related code in a separate target so that we can
-    # build PeerConnectionFactory without audio/video support when that's called
-    # for.
-    rtc_static_library("videotracksource_objc") {
-      sources = [
-        "objc/Framework/Classes/Video/RTCCVPixelBuffer.mm",
-        "objc/Framework/Classes/Video/RTCI420Buffer+Private.h",
-        "objc/Framework/Classes/Video/RTCI420Buffer.mm",
-        "objc/Framework/Classes/Video/objc_frame_buffer.h",
-        "objc/Framework/Classes/Video/objc_frame_buffer.mm",
-        "objc/Framework/Classes/Video/objcvideotracksource.h",
-        "objc/Framework/Classes/Video/objcvideotracksource.mm",
-        "objc/Framework/Headers/WebRTC/RTCVideoFrameBuffer.h",
-      ]
-
-      deps = [
-        ":common_objc",
-        "../api:libjingle_peerconnection_api",
-        "../api:video_frame_api",
-        "../api:video_frame_api_i420",
-        "../common_video",
-        "../media:rtc_media_base",
-        "../rtc_base:rtc_base",
-      ]
-
-      configs += [ "..:common_objc" ]
-      if (!build_with_chromium && is_clang) {
-        # Suppress warnings from the Chromium Clang plugin
-        # (bugs.webrtc.org/163).
-        suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
-      }
-    }
-
-    rtc_static_library("video_objc") {
-      sources = [
-        "objc/Framework/Classes/Video/AVCaptureSession+DevicePosition.h",
-        "objc/Framework/Classes/Video/AVCaptureSession+DevicePosition.mm",
-        "objc/Framework/Classes/Video/RTCAVFoundationVideoCapturerInternal.h",
-        "objc/Framework/Classes/Video/RTCAVFoundationVideoCapturerInternal.mm",
-        "objc/Framework/Classes/Video/RTCDefaultShader.h",
-        "objc/Framework/Classes/Video/RTCDefaultShader.mm",
-        "objc/Framework/Classes/Video/RTCI420TextureCache.h",
-        "objc/Framework/Classes/Video/RTCI420TextureCache.mm",
-        "objc/Framework/Classes/Video/RTCOpenGLDefines.h",
-        "objc/Framework/Classes/Video/RTCShader.h",
-        "objc/Framework/Classes/Video/RTCShader.mm",
-        "objc/Framework/Classes/Video/avfoundationformatmapper.h",
-        "objc/Framework/Classes/Video/avfoundationformatmapper.mm",
-        "objc/Framework/Classes/Video/avfoundationvideocapturer.h",
-        "objc/Framework/Classes/Video/avfoundationvideocapturer.mm",
-      ]
-      libs = []
-      if (is_ios) {
-        sources += [
-          "objc/Framework/Classes/Video/RTCNV12TextureCache.h",
-          "objc/Framework/Classes/Video/RTCNV12TextureCache.m",
-        ]
-        libs += [
-          "GLKit.framework",
-          "OpenGLES.framework",
-          "QuartzCore.framework",
-        ]
-      } else if (is_mac) {
-        sources += []
-
-        libs += [
-          "CoreMedia.framework",
-          "CoreVideo.framework",
-          "OpenGL.framework",
-        ]
-      }
-
-      deps = [
-        ":common_objc",
-        ":videotracksource_objc",
-        "../api:libjingle_peerconnection_api",
-        "../api:optional",
-        "../common_video",
-        "../media:rtc_media_base",
-        "../rtc_base:rtc_base",
-      ]
-
-      configs += [ "..:common_objc" ]
-      if (!build_with_chromium && is_clang) {
-        # Suppress warnings from the Chromium Clang plugin
-        # (bugs.webrtc.org/163).
-        suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
-      }
-    }
-
-    rtc_static_library("ui_objc") {
-      if (is_ios) {
-        sources = [
-          "objc/Framework/Classes/UI/RTCCameraPreviewView.m",
-          "objc/Framework/Classes/UI/RTCEAGLVideoView.m",
-          "objc/Framework/Headers/WebRTC/RTCCameraPreviewView.h",
-          "objc/Framework/Headers/WebRTC/RTCEAGLVideoView.h",
-        ]
-      }
-      if (is_mac) {
-        sources = [
-          "objc/Framework/Classes/UI/RTCNSGLVideoView.m",
-          "objc/Framework/Headers/WebRTC/RTCNSGLVideoView.h",
-        ]
-      }
-      configs += [ "..:common_objc" ]
-      deps = [
-        ":common_objc",
-        ":peerconnection_objc",
-      ]
-    }
-
-    if (rtc_use_metal_rendering) {
-      rtc_static_library("metal_objc") {
-        sources = [
-          "objc/Framework/Classes/Metal/RTCMTLI420Renderer.h",
-          "objc/Framework/Classes/Metal/RTCMTLI420Renderer.mm",
-          "objc/Framework/Classes/Metal/RTCMTLRenderer+Private.h",
-          "objc/Framework/Classes/Metal/RTCMTLRenderer.h",
-          "objc/Framework/Classes/Metal/RTCMTLRenderer.mm",
-        ]
-        if (is_ios) {
-          sources += [
-            "objc/Framework/Classes/Metal/RTCMTLNV12Renderer.h",
-            "objc/Framework/Classes/Metal/RTCMTLNV12Renderer.mm",
-            "objc/Framework/Classes/Metal/RTCMTLVideoView.m",
-            "objc/Framework/Headers/WebRTC/RTCMTLVideoView.h",
-          ]
-        }
-        if (is_mac) {
-          sources += [
-            "objc/Framework/Classes/Metal/RTCMTLNSVideoView.m",
-            "objc/Framework/Headers/WebRTC/RTCMTLNSVideoView.h",
-          ]
-        }
-        libs = [
-          "CoreVideo.framework",
-          "Metal.framework",
-          "MetalKit.framework",
-        ]
-        deps = [
-          ":video_objc",
-          "../api:video_frame_api",
-          "../rtc_base:rtc_base_approved",
-        ]
-        configs += [ "..:common_objc" ]
-        public_configs = [ ":common_config_objc" ]
-      }
-    }
-
-    rtc_static_library("peerconnection_objc") {
-      sources = [
-        "objc/Framework/Classes/PeerConnection/RTCAVFoundationVideoSource+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCAVFoundationVideoSource.mm",
-        "objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.m",
-        "objc/Framework/Headers/WebRTC/RTCCameraVideoCapturer.h",
-      ]
-      if (is_ios) {
-        sources += [
-          "objc/Framework/Classes/PeerConnection/RTCFileVideoCapturer.m",
-          "objc/Framework/Headers/WebRTC/RTCFileVideoCapturer.h",
-        ]
-      }
-      libs = [ "AVFoundation.framework" ]
-
-      if (is_ios) {
-        libs += [ "CoreGraphics.framework" ]
-      }
-
-      configs += [ "..:common_objc" ]
-
-      public_configs = [ ":common_config_objc" ]
-
-      if (!build_with_chromium && is_clang) {
-        # Suppress warnings from the Chromium Clang plugin
-        # (bugs.webrtc.org/163).
-        suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
-      }
-
-      deps = [
-        ":common_objc",
-        ":corevideoframebuffer_objc",
-        ":peerconnectionfactory_objc",
-        ":video_objc",
-        "../api:video_frame_api",
-        "../media:rtc_media_base",
-        "../pc:libjingle_peerconnection",
-        "../rtc_base:rtc_base",
-      ]
-
-      if (rtc_use_metal_rendering) {
-        deps += [ ":metal_objc" ]
-      }
-
-      if (rtc_build_libyuv) {
-        deps += [ "$rtc_libyuv_dir" ]
-        public_deps = [
-          "$rtc_libyuv_dir",
-        ]
-      }
-    }
-
-    rtc_static_library("peerconnectionfactory_objc") {
-      sources = [
-        "objc/Framework/Classes/PeerConnection/RTCDefaultVideoDecoderFactory.m",
-        "objc/Framework/Classes/PeerConnection/RTCDefaultVideoEncoderFactory.m",
-        "objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory+Native.h",
-        "objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory.mm",
-        "objc/Framework/Classes/PeerConnection/RTCVideoCodecH264.mm",
-      ]
-
-      public_configs = [ ":common_config_objc" ]
-
-      if (!build_with_chromium && is_clang) {
-        # Suppress warnings from the Chromium Clang plugin
-        # (bugs.webrtc.org/163).
-        suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
-      }
-
-      deps = [
-        ":common_objc",
-        ":corevideoframebuffer_objc",
-        ":peerconnectionfactory_base_objc",
-        ":video_objc",
-        ":videotoolbox_objc",
-        ":videotracksource_objc",
-        "../api:video_frame_api",
-        "../api/audio_codecs:builtin_audio_decoder_factory",
-        "../api/audio_codecs:builtin_audio_encoder_factory",
-        "../api/video_codecs:video_codecs_api",
-        "../media:rtc_audio_video",
-        "../media:rtc_media_base",
-        "../pc:create_pc_factory",
-        "../pc:peerconnection",
-        "../rtc_base:rtc_base",
-        "../system_wrappers:field_trial_api",
-      ]
-    }
-
-    # Build the PeerConnectionFactory without audio/video support.
-    # This target depends on the objc_peeerconnectionfactory_base which still
-    # includes some audio/video related objects such as RTCAudioSource because
-    # these objects are just thin wrappers of native C++ interfaces required
-    # when implementing webrtc::PeerConnectionFactoryInterface and
-    # webrtc::PeerConnectionInterface.
-    # The applications which only use WebRTC DataChannel can depend on this
-    # target instead of rtc_sdk_objc.
-    rtc_static_library("peerconnectionfactory_no_media_objc") {
-      defines = [ "HAVE_NO_MEDIA" ]
-
-      sources = [
-        "objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory+Native.h",
-        "objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory.mm",
-      ]
-
-      public_configs = [ ":common_config_objc" ]
-
-      if (!build_with_chromium && is_clang) {
-        # Suppress warnings from the Chromium Clang plugin
-        # (bugs.webrtc.org/163).
-        suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
-      }
-
-      deps = [
-        ":peerconnectionfactory_base_objc",
-        "../api:libjingle_peerconnection_api",
-        "../rtc_base:rtc_base",
-      ]
-    }
-
-    rtc_static_library("peerconnectionfactory_base_objc") {
-      sources = [
-        "objc/Framework/Classes/PeerConnection/RTCAudioSource+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCAudioSource.mm",
-        "objc/Framework/Classes/PeerConnection/RTCAudioTrack+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCAudioTrack.mm",
-        "objc/Framework/Classes/PeerConnection/RTCConfiguration+Native.h",
-        "objc/Framework/Classes/PeerConnection/RTCConfiguration+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCConfiguration.mm",
-        "objc/Framework/Classes/PeerConnection/RTCDataChannel+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCDataChannel.mm",
-        "objc/Framework/Classes/PeerConnection/RTCDataChannelConfiguration+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCDataChannelConfiguration.mm",
-        "objc/Framework/Classes/PeerConnection/RTCEncodedImage.mm",
-        "objc/Framework/Classes/PeerConnection/RTCIceCandidate+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCIceCandidate.mm",
-        "objc/Framework/Classes/PeerConnection/RTCIceServer+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCIceServer.mm",
-        "objc/Framework/Classes/PeerConnection/RTCIntervalRange+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCIntervalRange.mm",
-        "objc/Framework/Classes/PeerConnection/RTCLegacyStatsReport+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCLegacyStatsReport.mm",
-        "objc/Framework/Classes/PeerConnection/RTCMediaConstraints+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCMediaConstraints.mm",
-        "objc/Framework/Classes/PeerConnection/RTCMediaSource+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCMediaSource.mm",
-        "objc/Framework/Classes/PeerConnection/RTCMediaStream+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCMediaStream.mm",
-        "objc/Framework/Classes/PeerConnection/RTCMediaStreamTrack+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCMediaStreamTrack.mm",
-        "objc/Framework/Classes/PeerConnection/RTCMetrics.mm",
-        "objc/Framework/Classes/PeerConnection/RTCMetricsSampleInfo+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCMetricsSampleInfo.mm",
-        "objc/Framework/Classes/PeerConnection/RTCPeerConnection+DataChannel.mm",
-        "objc/Framework/Classes/PeerConnection/RTCPeerConnection+Native.h",
-        "objc/Framework/Classes/PeerConnection/RTCPeerConnection+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCPeerConnection+Stats.mm",
-        "objc/Framework/Classes/PeerConnection/RTCPeerConnection.mm",
-        "objc/Framework/Classes/PeerConnection/RTCRtpCodecParameters+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCRtpCodecParameters.mm",
-        "objc/Framework/Classes/PeerConnection/RTCRtpEncodingParameters+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCRtpEncodingParameters.mm",
-        "objc/Framework/Classes/PeerConnection/RTCRtpFragmentationHeader.mm",
-        "objc/Framework/Classes/PeerConnection/RTCRtpParameters+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCRtpParameters.mm",
-        "objc/Framework/Classes/PeerConnection/RTCRtpReceiver+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCRtpReceiver.mm",
-        "objc/Framework/Classes/PeerConnection/RTCRtpSender+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCRtpSender.mm",
-        "objc/Framework/Classes/PeerConnection/RTCSSLAdapter.mm",
-        "objc/Framework/Classes/PeerConnection/RTCSessionDescription+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCSessionDescription.mm",
-        "objc/Framework/Classes/PeerConnection/RTCTracing.mm",
-        "objc/Framework/Classes/PeerConnection/RTCVideoCapturer.m",
-        "objc/Framework/Classes/PeerConnection/RTCVideoCodec+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCVideoCodec.mm",
-        "objc/Framework/Classes/PeerConnection/RTCVideoCodecVP8.mm",
-        "objc/Framework/Classes/PeerConnection/RTCVideoCodecVP9.mm",
-        "objc/Framework/Classes/PeerConnection/RTCVideoEncoderSettings.mm",
-        "objc/Framework/Classes/PeerConnection/RTCVideoFrame+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCVideoFrame.mm",
-        "objc/Framework/Classes/PeerConnection/RTCVideoRendererAdapter+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCVideoRendererAdapter.h",
-        "objc/Framework/Classes/PeerConnection/RTCVideoRendererAdapter.mm",
-        "objc/Framework/Classes/PeerConnection/RTCVideoSource+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCVideoSource.mm",
-        "objc/Framework/Classes/PeerConnection/RTCVideoTrack+Private.h",
-        "objc/Framework/Classes/PeerConnection/RTCVideoTrack.mm",
-        "objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoDecoder.h",
-        "objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoDecoder.mm",
-        "objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoEncoder.h",
-        "objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoEncoder.mm",
-        "objc/Framework/Headers/WebRTC/RTCAVFoundationVideoSource.h",
-        "objc/Framework/Headers/WebRTC/RTCAudioSource.h",
-        "objc/Framework/Headers/WebRTC/RTCAudioTrack.h",
-        "objc/Framework/Headers/WebRTC/RTCConfiguration.h",
-        "objc/Framework/Headers/WebRTC/RTCDataChannel.h",
-        "objc/Framework/Headers/WebRTC/RTCDataChannelConfiguration.h",
-        "objc/Framework/Headers/WebRTC/RTCIceCandidate.h",
-        "objc/Framework/Headers/WebRTC/RTCIceServer.h",
-        "objc/Framework/Headers/WebRTC/RTCIntervalRange.h",
-        "objc/Framework/Headers/WebRTC/RTCLegacyStatsReport.h",
-        "objc/Framework/Headers/WebRTC/RTCMediaConstraints.h",
-        "objc/Framework/Headers/WebRTC/RTCMediaSource.h",
-        "objc/Framework/Headers/WebRTC/RTCMediaStream.h",
-        "objc/Framework/Headers/WebRTC/RTCMediaStreamTrack.h",
-        "objc/Framework/Headers/WebRTC/RTCMetrics.h",
-        "objc/Framework/Headers/WebRTC/RTCMetricsSampleInfo.h",
-        "objc/Framework/Headers/WebRTC/RTCPeerConnection.h",
-        "objc/Framework/Headers/WebRTC/RTCPeerConnectionFactory.h",
-        "objc/Framework/Headers/WebRTC/RTCRtpCodecParameters.h",
-        "objc/Framework/Headers/WebRTC/RTCRtpEncodingParameters.h",
-        "objc/Framework/Headers/WebRTC/RTCRtpParameters.h",
-        "objc/Framework/Headers/WebRTC/RTCRtpReceiver.h",
-        "objc/Framework/Headers/WebRTC/RTCRtpSender.h",
-        "objc/Framework/Headers/WebRTC/RTCSSLAdapter.h",
-        "objc/Framework/Headers/WebRTC/RTCSessionDescription.h",
-        "objc/Framework/Headers/WebRTC/RTCTracing.h",
-        "objc/Framework/Headers/WebRTC/RTCVideoCapturer.h",
-        "objc/Framework/Headers/WebRTC/RTCVideoFrame.h",
-        "objc/Framework/Headers/WebRTC/RTCVideoFrameBuffer.h",
-        "objc/Framework/Headers/WebRTC/RTCVideoRenderer.h",
-        "objc/Framework/Headers/WebRTC/RTCVideoSource.h",
-        "objc/Framework/Headers/WebRTC/RTCVideoTrack.h",
-        "objc/Framework/Headers/WebRTC/RTCVideoViewShading.h",
-      ]
-
-      configs += [ "..:common_objc" ]
-
-      public_configs = [ ":common_config_objc" ]
-
-      if (!build_with_chromium && is_clang) {
-        # Suppress warnings from the Chromium Clang plugin
-        # (bugs.webrtc.org/163).
-        suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
-      }
-
-      deps = [
-        ":common_objc",
-        ":corevideoframebuffer_objc",
-        ":videotracksource_objc",
-        "../api:video_frame_api",
-        "../api/video_codecs:video_codecs_api",
-        "../common_video",
-        "../media:rtc_media_base",
-        "../modules:module_api",
-        "../modules/video_coding:webrtc_vp8",
-        "../modules/video_coding:webrtc_vp9",
-        "../pc:peerconnection",
-        "../rtc_base:rtc_base",
-      ]
-    }
-
-    if (rtc_include_tests) {
-      if (is_ios) {
-        rtc_source_set("sdk_unittests_sources") {
-          testonly = true
-          include_dirs = [
-            "objc/Framework/Headers",
-            "objc/Framework/Classes",
-          ]
-
-          sources = [
-            "objc/Framework/UnitTests/RTCFileVideoCapturer_xctest.mm",
-          ]
-
-          if (current_cpu == "arm64" || use_ios_simulator) {
-            sources += [ "objc/Framework/UnitTests/RTCMTLVideoView_xctest.mm" ]
-          }
-
-          if (use_ios_simulator) {
-            # Only include this file on simulator, as it's already
-            # included in device builds.
-            sources += [ "objc/Framework/Classes/Metal/RTCMTLVideoView.m" ]
-            libs = [ "CoreVideo.framework" ]
-          }
-          deps = [
-            ":common_objc",
-            ":peerconnection_objc",
-            ":peerconnectionfactory_objc",
-            ":videotoolbox_objc",
-            ":videotracksource_objc",
-            "..//system_wrappers:system_wrappers_default",
-            "../media:rtc_media_base",
-            "../modules:module_api",
-            "../rtc_base:rtc_base",
-            "../rtc_base:rtc_base_tests_utils",
-            "../system_wrappers:system_wrappers_default",
-          ]
-          public_deps = [
-            "//build/config/ios:xctest",
-            "//third_party/ocmock",
-          ]
-        }
-
-        bundle_data("sdk_unittests_bundle_data") {
-          # Sample video taken from https://media.xiph.org/video/derf/
-          sources = [
-            "objc/Framework/UnitTests/foreman.mp4",
-          ]
-          outputs = [
-            "{{bundle_resources_dir}}/{{source_file_part}}",
-          ]
-        }
-
-        rtc_ios_xctest_test("sdk_unittests") {
-          info_plist = "//test/ios/Info.plist"
-          sources = [
-            "objc/Framework/UnitTests/main.m",
-          ]
-
-          _bundle_id_suffix = ios_generic_test_bundle_id_suffix
-          extra_substitutions = [ "GTEST_BUNDLE_ID_SUFFIX=$_bundle_id_suffix" ]
-          deps = [
-            ":sdk_unittests_bundle_data",
-            ":sdk_unittests_sources",
-          ]
-          ldflags = [ "-all_load" ]
-        }
-      }
-
-      # TODO(denicija): once all tests are migrated to xctest remove this source set.
-      rtc_source_set("sdk_unittests_objc") {
-        testonly = true
-
-        sources = [
-          "objc/Framework/UnitTests/RTCConfigurationTest.mm",
-          "objc/Framework/UnitTests/RTCDataChannelConfigurationTest.mm",
-          "objc/Framework/UnitTests/RTCIceCandidateTest.mm",
-          "objc/Framework/UnitTests/RTCIceServerTest.mm",
-          "objc/Framework/UnitTests/RTCIntervalRangeTests.mm",
-          "objc/Framework/UnitTests/RTCMediaConstraintsTest.mm",
-          "objc/Framework/UnitTests/RTCPeerConnectionTest.mm",
-          "objc/Framework/UnitTests/RTCSessionDescriptionTest.mm",
-          "objc/Framework/UnitTests/RTCTracingTest.mm",
-          "objc/Framework/UnitTests/avformatmappertests.mm",
-          "objc/Framework/UnitTests/objc_video_decoder_factory_tests.mm",
-          "objc/Framework/UnitTests/objc_video_encoder_factory_tests.mm",
-          "objc/Framework/UnitTests/scoped_cftyperef_tests.mm",
-        ]
-        if (is_ios &&
-            !(use_ios_simulator &&
-              # The tests crash on these simulator versions:
-              (ios_sdk_version == "10.0" || ios_sdk_version == "10.1"))) {
-          sources +=
-              [ "objc/Framework/UnitTests/RTCCameraVideoCapturerTests.mm" ]
-        }
-
-        # |-ObjC| flag needed to make sure category method implementations
-        # are included:
-        # https://developer.apple.com/library/mac/qa/qa1490/_index.html
-        ldflags = [ "-ObjC" ]
-
-        defines = [ "GTEST_RELATIVE_PATH" ]
-        deps = [
-          ":common_objc",
-          ":peerconnection_objc",
-          ":peerconnectionfactory_objc",
-          ":videotoolbox_objc",
-          ":videotracksource_objc",
-          "..//system_wrappers:system_wrappers_default",
-          "../media:rtc_media_base",
-          "../modules:module_api",
-          "../rtc_base:rtc_base_tests_utils",
-          "../system_wrappers:system_wrappers_default",
-          "//test:test_support",
-          "//third_party/ocmock",
-        ]
-
-        if (is_ios) {
-          sources += [ "objc/Framework/UnitTests/RTCAudioSessionTest.mm" ]
-        }
-
-        if (!build_with_chromium && is_clang) {
-          # Suppress warnings from the Chromium Clang plugin
-          # (bugs.webrtc.org/163).
-          suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
-        }
-      }
-    }
-
-    if (is_ios) {
-      ios_framework_bundle("framework_objc") {
-        info_plist = "objc/Framework/Info.plist"
-        output_name = "WebRTC"
-
-        common_objc_headers = [
-          "objc/Framework/Headers/WebRTC/RTCAudioSession.h",
-          "objc/Framework/Headers/WebRTC/RTCVideoCodec.h",
-          "objc/Framework/Headers/WebRTC/RTCVideoCodecFactory.h",
-          "objc/Framework/Headers/WebRTC/RTCAudioSessionConfiguration.h",
-          "objc/Framework/Headers/WebRTC/RTCAVFoundationVideoSource.h",
-          "objc/Framework/Headers/WebRTC/RTCAudioSource.h",
-          "objc/Framework/Headers/WebRTC/RTCAudioTrack.h",
-          "objc/Framework/Headers/WebRTC/RTCCameraVideoCapturer.h",
-          "objc/Framework/Headers/WebRTC/RTCCameraPreviewView.h",
-          "objc/Framework/Headers/WebRTC/RTCConfiguration.h",
-          "objc/Framework/Headers/WebRTC/RTCDataChannel.h",
-          "objc/Framework/Headers/WebRTC/RTCDataChannelConfiguration.h",
-          "objc/Framework/Headers/WebRTC/RTCDispatcher.h",
-          "objc/Framework/Headers/WebRTC/RTCEAGLVideoView.h",
-          "objc/Framework/Headers/WebRTC/RTCFieldTrials.h",
-          "objc/Framework/Headers/WebRTC/RTCFileVideoCapturer.h",
-          "objc/Framework/Headers/WebRTC/RTCIceCandidate.h",
-          "objc/Framework/Headers/WebRTC/RTCIceServer.h",
-          "objc/Framework/Headers/WebRTC/RTCIntervalRange.h",
-          "objc/Framework/Headers/WebRTC/RTCLegacyStatsReport.h",
-          "objc/Framework/Headers/WebRTC/RTCLogging.h",
-          "objc/Framework/Headers/WebRTC/RTCMacros.h",
-          "objc/Framework/Headers/WebRTC/RTCMediaConstraints.h",
-          "objc/Framework/Headers/WebRTC/RTCMediaSource.h",
-          "objc/Framework/Headers/WebRTC/RTCMediaStream.h",
-          "objc/Framework/Headers/WebRTC/RTCMediaStreamTrack.h",
-          "objc/Framework/Headers/WebRTC/RTCMetrics.h",
-          "objc/Framework/Headers/WebRTC/RTCMetricsSampleInfo.h",
-          "objc/Framework/Headers/WebRTC/RTCPeerConnection.h",
-          "objc/Framework/Headers/WebRTC/RTCPeerConnectionFactory.h",
-          "objc/Framework/Headers/WebRTC/RTCRtpCodecParameters.h",
-          "objc/Framework/Headers/WebRTC/RTCRtpEncodingParameters.h",
-          "objc/Framework/Headers/WebRTC/RTCRtpParameters.h",
-          "objc/Framework/Headers/WebRTC/RTCRtpReceiver.h",
-          "objc/Framework/Headers/WebRTC/RTCRtpSender.h",
-          "objc/Framework/Headers/WebRTC/RTCSSLAdapter.h",
-          "objc/Framework/Headers/WebRTC/RTCSessionDescription.h",
-          "objc/Framework/Headers/WebRTC/RTCTracing.h",
-          "objc/Framework/Headers/WebRTC/RTCVideoCapturer.h",
-          "objc/Framework/Headers/WebRTC/RTCVideoCodecH264.h",
-          "objc/Framework/Headers/WebRTC/RTCVideoDecoderVP8.h",
-          "objc/Framework/Headers/WebRTC/RTCVideoDecoderVP9.h",
-          "objc/Framework/Headers/WebRTC/RTCVideoEncoderVP8.h",
-          "objc/Framework/Headers/WebRTC/RTCVideoEncoderVP9.h",
-          "objc/Framework/Headers/WebRTC/RTCVideoFrame.h",
-          "objc/Framework/Headers/WebRTC/RTCVideoFrameBuffer.h",
-          "objc/Framework/Headers/WebRTC/RTCVideoRenderer.h",
-          "objc/Framework/Headers/WebRTC/RTCVideoSource.h",
-          "objc/Framework/Headers/WebRTC/RTCVideoTrack.h",
-          "objc/Framework/Headers/WebRTC/RTCVideoViewShading.h",
-          "objc/Framework/Headers/WebRTC/UIDevice+RTCDevice.h",
-          "objc/Framework/Headers/WebRTC/WebRTC.h",
-        ]
-        if (rtc_use_metal_rendering) {
-          common_objc_headers +=
-              [ "objc/Framework/Headers/WebRTC/RTCMTLVideoView.h" ]
-        }
-        sources = common_objc_headers
-        public_headers = common_objc_headers
-
-        if (!build_with_chromium) {
-          sources += [ "objc/Framework/Headers/WebRTC/RTCFileLogger.h" ]
-          public_headers += [ "objc/Framework/Headers/WebRTC/RTCFileLogger.h" ]
-        }
-
-        ldflags = [
-          "-all_load",
-          "-install_name",
-          "@rpath/$output_name.framework/$output_name",
-        ]
-
-        deps = [
-          ":audio_objc",
-          ":peerconnection_objc",
-          ":ui_objc",
-          "../rtc_base:rtc_base_approved",
-          "../system_wrappers:field_trial_default",
-          "../system_wrappers:metrics_default",
-        ]
-
-        libs = [
-          "AVFoundation.framework",
-          "CoreGraphics.framework",
-          "CoreMedia.framework",
-          "GLKit.framework",
-        ]
-
-        configs += [ "..:common_objc" ]
-
-        public_configs = [ ":common_config_objc" ]
-
-        if (!build_with_chromium && is_clang) {
-          # Suppress warnings from the Chromium Clang plugin
-          # (bugs.webrtc.org/163).
-          configs -= [ "//build/config/clang:find_bad_constructs" ]
-        }
-      }
-    }
-
-    rtc_static_library("rtc_sdk_objc") {
-      complete_static_lib = true
-      deps = [
-        ":peerconnection_objc",
-        ":ui_objc",
-        "../system_wrappers:field_trial_default",
-        "../system_wrappers:metrics_default",
-      ]
-    }
-
-    rtc_static_library("corevideoframebuffer_objc") {
-      sources = [
-        "objc/Framework/Classes/Video/corevideo_frame_buffer.cc",
-        "objc/Framework/Classes/Video/corevideo_frame_buffer.h",
-      ]
-
-      deps = [
-        "../api:video_frame_api_i420",
-        "../common_video",
-        "../rtc_base:rtc_base_approved",
-      ]
-
-      if (!build_with_chromium && is_clang) {
-        # Suppress warnings from the Chromium Clang plugin
-        # (bugs.webrtc.org/163).
-        suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
-      }
-
-      libs = [ "CoreVideo.framework" ]
-    }
-
-    rtc_static_library("video_toolbox_cc") {
-      visibility = [ ":videotoolbox_objc" ]
-      sources = [
-        "objc/Framework/Classes/VideoToolbox/helpers.cc",
-        "objc/Framework/Classes/VideoToolbox/helpers.h",
-        "objc/Framework/Classes/VideoToolbox/nalu_rewriter.cc",
-        "objc/Framework/Classes/VideoToolbox/nalu_rewriter.h",
-      ]
-      deps = [
-        "../common_video",
-        "../modules:module_api",
-        "../modules/video_coding:webrtc_h264",
-        "../rtc_base:rtc_base_approved",
-      ]
-      if (!build_with_chromium && is_clang) {
-        # Suppress warnings from the Chromium Clang plugin
-        # (bugs.webrtc.org/163).
-        suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
-      }
-    }
-
-    rtc_static_library("videotoolbox_objc") {
-      sources = [
-        "objc/Framework/Classes/VideoToolbox/RTCVideoDecoderH264.mm",
-        "objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm",
-        "objc/Framework/Classes/VideoToolbox/objc_video_decoder_factory.h",
-        "objc/Framework/Classes/VideoToolbox/objc_video_decoder_factory.mm",
-        "objc/Framework/Classes/VideoToolbox/objc_video_encoder_factory.h",
-        "objc/Framework/Classes/VideoToolbox/objc_video_encoder_factory.mm",
-        "objc/Framework/Headers/WebRTC/RTCVideoFrameBuffer.h",
-      ]
-
-      configs += [ "..:common_objc" ]
-
-      deps = [
-        ":common_objc",
-        ":video_objc",
-        ":video_toolbox_cc",
-        ":videotracksource_objc",
-        "../api/video_codecs:video_codecs_api",
-        "../common_video",
-        "../media:rtc_media",
-        "../media:rtc_media_base",
-        "../modules:module_api",
-        "../modules/video_coding:video_coding_utility",
-        "../modules/video_coding:webrtc_h264",
-        "../rtc_base:rtc_base_approved",
-        "../system_wrappers",
-      ]
-
-      libs = [
-        "CoreFoundation.framework",
-        "CoreMedia.framework",
-        "CoreVideo.framework",
-        "VideoToolbox.framework",
-      ]
-
-      if (!build_with_chromium && is_clang) {
-        # Suppress warnings from the Chromium Clang plugin
-        # (bugs.webrtc.org/163).
-        suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
-      }
-
-      if (rtc_build_libyuv) {
-        deps += [ "$rtc_libyuv_dir" ]
-        public_deps = [
-          "$rtc_libyuv_dir",
-        ]
-      } else {
-        # Need to add a directory normally exported by libyuv.
-        include_dirs = [ "$rtc_libyuv_dir/include" ]
-      }
-    }
-  }
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/OWNERS
+++ /dev/null
@@ -1,6 +0,0 @@
-magjed@webrtc.org
-
-# These are for the common case of adding or renaming files. If you're doing
-# structural changes, please get a review from a reviewer in this file.
-per-file *.gn=*
-per-file *.gni=*
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/AndroidManifest.xml
+++ /dev/null
@@ -1,14 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!--
- *  Copyright 2017 The WebRTC Project Authors. All rights reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
--->
-<manifest xmlns:android="http://schemas.android.com/apk/res/android"
-    package="org.webrtc">
-  <uses-sdk android:minSdkVersion="16" android:targetSdkVersion="23" />
-</manifest>
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/BUILD.gn
+++ /dev/null
@@ -1,703 +0,0 @@
-# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS.  All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-import("../../webrtc.gni")
-import("//build/config/android/config.gni")
-import("//build/config/android/rules.gni")
-
-group("android") {
-  if (!build_with_chromium && is_android) {
-    public_deps = [
-      ":libjingle_peerconnection_datachannelonly_so",
-      ":libjingle_peerconnection_jni",
-      ":libjingle_peerconnection_so",
-      ":libwebrtc",
-    ]
-  }
-}
-
-config("libjingle_peerconnection_jni_warnings_config") {
-  # The warnings below are enabled by default. Since GN orders compiler flags
-  # for a target before flags from configs, the only way to disable such
-  # warnings is by having them in a separate config, loaded from the target.
-  if (!is_win) {
-    cflags = [
-      "-Wno-sign-compare",
-      "-Wno-unused-variable",
-    ]
-  }
-}
-
-generate_jni("generated_base_jni") {
-  sources = [
-    "src/java/org/webrtc/WebRtcClassLoader.java",
-  ]
-  jni_package = ""
-  jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
-}
-
-generate_jar_jni("generated_external_classes_jni") {
-  classes = [
-    "java/lang/Integer.class",
-    "java/lang/Double.class",
-    "java/lang/Long.class",
-    "java/lang/Boolean.class",
-    "java/math/BigInteger.class",
-    "java/lang/String.class",
-  ]
-  jni_package = ""
-  jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
-}
-
-rtc_source_set("base_jni") {
-  sources = [
-    "src/jni/androidhistogram_jni.cc",
-    "src/jni/class_loader.cc",
-    "src/jni/class_loader.h",
-    "src/jni/classreferenceholder.cc",
-    "src/jni/classreferenceholder.h",
-    "src/jni/jni_common.cc",
-    "src/jni/jni_generator_helper.cc",
-    "src/jni/jni_generator_helper.h",
-    "src/jni/jni_helpers.cc",
-    "src/jni/jni_helpers.h",
-    "src/jni/pc/audio_jni.h",
-    "src/jni/pc/media_jni.h",
-    "src/jni/pc/video_jni.h",
-  ]
-
-  deps = [
-    ":generated_base_jni",
-    ":generated_external_classes_jni",
-    "../../api:libjingle_peerconnection_api",
-    "../../api:optional",
-    "../../rtc_base:rtc_base",
-    "../../rtc_base:rtc_base_approved",
-    "../../system_wrappers:metrics_api",
-  ]
-
-  if (is_clang) {
-    # Suppress warnings from the Chromium Clang plugin (bugs.webrtc.org/163).
-    suppressed_configs += [
-      "//build/config/clang:extra_warnings",
-      "//build/config/clang:find_bad_constructs",
-    ]
-  }
-}
-
-rtc_static_library("audio_jni") {
-  sources = [
-    "src/jni/pc/audio_jni.cc",
-    "src/jni/pc/defaultaudioprocessingfactory_jni.cc",
-  ]
-
-  deps = [
-    ":base_jni",
-    "../../api/audio_codecs:builtin_audio_decoder_factory",
-    "../../api/audio_codecs:builtin_audio_encoder_factory",
-    "../../modules/audio_processing:audio_processing",
-    "../../rtc_base:rtc_base_approved",
-    "../../voice_engine:voice_engine",
-  ]
-}
-
-rtc_static_library("null_audio_jni") {
-  sources = [
-    "src/jni/pc/null_audio_jni.cc",
-  ]
-
-  deps = [
-    ":base_jni",
-  ]
-}
-
-generate_jni("generated_video_jni") {
-  sources = [
-    "api/org/webrtc/EncodedImage.java",
-    "api/org/webrtc/SurfaceTextureHelper.java",
-    "api/org/webrtc/VideoCodecStatus.java",
-    "api/org/webrtc/VideoDecoder.java",
-    "api/org/webrtc/VideoEncoder.java",
-    "api/org/webrtc/VideoFrame.java",
-    "api/org/webrtc/VideoSink.java",
-    "src/java/org/webrtc/VideoDecoderWrapper.java",
-    "src/java/org/webrtc/VideoEncoderWrapper.java",
-    "src/java/org/webrtc/WrappedNativeVideoDecoder.java",
-    "src/java/org/webrtc/WrappedNativeVideoEncoder.java",
-  ]
-  jni_package = ""
-  jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
-}
-
-rtc_static_library("video_jni") {
-  sources = [
-    "src/jni/androidmediacodeccommon.h",
-    "src/jni/androidmediadecoder_jni.cc",
-    "src/jni/androidmediadecoder_jni.h",
-    "src/jni/androidmediaencoder_jni.cc",
-    "src/jni/androidmediaencoder_jni.h",
-    "src/jni/androidvideotracksource.cc",
-    "src/jni/androidvideotracksource.h",
-    "src/jni/androidvideotracksource_jni.cc",
-    "src/jni/encodedimage.cc",
-    "src/jni/encodedimage.h",
-    "src/jni/hardwarevideoencoderfactory.cc",
-    "src/jni/jni_generator_helper.h",
-    "src/jni/nv12buffer_jni.cc",
-    "src/jni/nv21buffer_jni.cc",
-    "src/jni/pc/video_jni.cc",
-    "src/jni/surfacetexturehelper_jni.cc",
-    "src/jni/surfacetexturehelper_jni.h",
-    "src/jni/video_renderer_jni.cc",
-    "src/jni/videocodecinfo.cc",
-    "src/jni/videocodecinfo.h",
-    "src/jni/videocodecstatus.cc",
-    "src/jni/videocodecstatus.h",
-    "src/jni/videodecoderfactorywrapper.cc",
-    "src/jni/videodecoderfactorywrapper.h",
-    "src/jni/videodecoderfallback.cc",
-    "src/jni/videodecoderwrapper.cc",
-    "src/jni/videodecoderwrapper.h",
-    "src/jni/videoencoderfactorywrapper.cc",
-    "src/jni/videoencoderfactorywrapper.h",
-    "src/jni/videoencoderfallback.cc",
-    "src/jni/videoencoderwrapper.cc",
-    "src/jni/videoencoderwrapper.h",
-    "src/jni/videofilerenderer_jni.cc",
-    "src/jni/videoframe.cc",
-    "src/jni/videoframe.h",
-    "src/jni/videotrack_jni.cc",
-    "src/jni/vp8codec.cc",
-    "src/jni/vp9codec.cc",
-    "src/jni/wrapped_native_i420_buffer.cc",
-    "src/jni/wrapped_native_i420_buffer.h",
-    "src/jni/wrappednativecodec.cc",
-    "src/jni/wrappednativecodec.h",
-    "src/jni/yuvhelper.cc",
-  ]
-
-  configs += [ ":libjingle_peerconnection_jni_warnings_config" ]
-
-  if (is_clang) {
-    # Suppress warnings from the Chromium Clang plugin (bugs.webrtc.org/163).
-    suppressed_configs += [
-      "//build/config/clang:extra_warnings",
-      "//build/config/clang:find_bad_constructs",
-    ]
-  }
-
-  # TODO(jschuh): Bug 1348: fix this warning.
-  configs += [ "//build/config/compiler:no_size_t_to_int_warning" ]
-
-  if (is_win) {
-    cflags += [
-      "/wd4245",  # conversion from "int" to "size_t", signed/unsigned mismatch.
-      "/wd4389",  # signed/unsigned mismatch.
-    ]
-  }
-
-  deps = [
-    ":base_jni",
-    ":generated_video_jni",
-    ":peerconnection_jni",
-    "../..:webrtc_common",
-    "../../api:libjingle_peerconnection_api",
-    "../../api:video_frame_api",
-    "../../api/video_codecs:video_codecs_api",
-    "../../common_video:common_video",
-    "../../media:rtc_audio_video",
-    "../../media:rtc_media_base",
-    "../../modules:module_api",
-    "../../modules/utility:utility",
-    "../../modules/video_coding:codec_globals_headers",
-    "../../modules/video_coding:video_coding_utility",
-    "../../modules/video_coding:webrtc_vp8",
-    "../../modules/video_coding:webrtc_vp9",
-    "../../rtc_base:rtc_base",
-    "../../rtc_base:rtc_base_approved",
-    "../../rtc_base:rtc_task_queue",
-    "../../rtc_base:sequenced_task_checker",
-    "../../rtc_base:weak_ptr",
-    "../../system_wrappers:system_wrappers",
-  ]
-
-  if (rtc_build_libyuv) {
-    deps += [ "$rtc_libyuv_dir" ]
-    public_deps = [
-      "$rtc_libyuv_dir",
-    ]
-  } else {
-    # Need to add a directory normally exported by libyuv.
-    include_dirs = [ "$rtc_libyuv_dir/include" ]
-  }
-}
-
-rtc_static_library("null_video_jni") {
-  sources = [
-    "src/jni/pc/null_video_jni.cc",
-  ]
-
-  deps = [
-    ":base_jni",
-  ]
-}
-
-rtc_static_library("media_jni") {
-  sources = [
-    "src/jni/pc/media_jni.cc",
-  ]
-
-  deps = [
-    ":base_jni",
-    "../../api/video_codecs:video_codecs_api",
-    "../../call:call_interfaces",
-    "../../logging:rtc_event_log_api",
-    "../../media:rtc_audio_video",
-    "../../modules/audio_device:audio_device",
-    "../../modules/audio_processing:audio_processing",
-  ]
-
-  if (is_clang) {
-    # Suppress warnings from the Chromium Clang plugin (bugs.webrtc.org/163).
-    suppressed_configs += [
-      "//build/config/clang:extra_warnings",
-      "//build/config/clang:find_bad_constructs",
-    ]
-  }
-}
-
-rtc_static_library("null_media_jni") {
-  sources = [
-    "src/jni/pc/null_media_jni.cc",
-  ]
-
-  deps = [
-    ":base_jni",
-  ]
-
-  if (is_clang) {
-    # Suppress warnings from the Chromium Clang plugin (bugs.webrtc.org/163).
-    suppressed_configs += [
-      "//build/config/clang:extra_warnings",
-      "//build/config/clang:find_bad_constructs",
-    ]
-  }
-}
-
-generate_jni("generated_peerconnection_jni") {
-  sources = [
-    "api/org/webrtc/DataChannel.java",
-    "api/org/webrtc/IceCandidate.java",
-    "api/org/webrtc/MediaConstraints.java",
-    "api/org/webrtc/MediaStream.java",
-    "api/org/webrtc/MediaStreamTrack.java",
-    "api/org/webrtc/NetworkMonitor.java",
-    "api/org/webrtc/NetworkMonitorAutoDetect.java",
-    "api/org/webrtc/RTCStats.java",
-    "api/org/webrtc/RTCStatsCollectorCallback.java",
-    "api/org/webrtc/RTCStatsReport.java",
-    "api/org/webrtc/StatsObserver.java",
-    "api/org/webrtc/StatsReport.java",
-  ]
-  jni_package = ""
-  jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
-}
-
-rtc_static_library("peerconnection_jni") {
-  sources = [
-    "src/jni/androidnetworkmonitor_jni.h",
-    "src/jni/pc/androidnetworkmonitor_jni.cc",
-    "src/jni/pc/androidnetworkmonitor_jni.h",
-    "src/jni/pc/audiotrack_jni.cc",
-    "src/jni/pc/callsessionfilerotatinglogsink_jni.cc",
-    "src/jni/pc/datachannel.cc",
-    "src/jni/pc/datachannel.h",
-    "src/jni/pc/dtmfsender_jni.cc",
-    "src/jni/pc/java_native_conversion.cc",
-    "src/jni/pc/java_native_conversion.h",
-    "src/jni/pc/logging_jni.cc",
-    "src/jni/pc/mediaconstraints_jni.cc",
-    "src/jni/pc/mediaconstraints_jni.h",
-    "src/jni/pc/mediasource_jni.cc",
-    "src/jni/pc/mediastream_jni.cc",
-    "src/jni/pc/mediastreamtrack_jni.cc",
-    "src/jni/pc/ownedfactoryandthreads.cc",
-    "src/jni/pc/ownedfactoryandthreads.h",
-    "src/jni/pc/peerconnection_jni.cc",
-    "src/jni/pc/peerconnectionfactory_jni.cc",
-    "src/jni/pc/peerconnectionobserver_jni.cc",
-    "src/jni/pc/peerconnectionobserver_jni.h",
-    "src/jni/pc/rtcstatscollectorcallbackwrapper.cc",
-    "src/jni/pc/rtcstatscollectorcallbackwrapper.h",
-    "src/jni/pc/rtpreceiver_jni.cc",
-    "src/jni/pc/rtpreceiverobserver_jni.cc",
-    "src/jni/pc/rtpreceiverobserver_jni.h",
-    "src/jni/pc/rtpsender_jni.cc",
-    "src/jni/pc/sdpobserver_jni.h",
-    "src/jni/pc/statsobserver_jni.cc",
-    "src/jni/pc/statsobserver_jni.h",
-    "src/jni/pc/turncustomizer_jni.cc",
-  ]
-
-  configs += [ ":libjingle_peerconnection_jni_warnings_config" ]
-
-  if (is_clang) {
-    # Suppress warnings from the Chromium Clang plugin (bugs.webrtc.org/163).
-    suppressed_configs += [
-      "//build/config/clang:extra_warnings",
-      "//build/config/clang:find_bad_constructs",
-    ]
-  }
-
-  # TODO(jschuh): Bug 1348: fix this warning.
-  configs += [ "//build/config/compiler:no_size_t_to_int_warning" ]
-
-  if (is_win) {
-    cflags += [
-      "/wd4245",  # conversion from "int" to "size_t", signed/unsigned mismatch.
-      "/wd4389",  # signed/unsigned mismatch.
-    ]
-  }
-
-  deps = [
-    ":base_jni",
-    ":generated_external_classes_jni",
-    ":generated_peerconnection_jni",
-    "../..:webrtc_common",
-    "../../api/video_codecs:video_codecs_api",
-    "../../media:rtc_data",
-    "../../media:rtc_media_base",
-    "../../modules/audio_device:audio_device",
-    "../../modules/utility:utility",
-    "../../pc:peerconnection",
-    "../../rtc_base:rtc_base",
-    "../../rtc_base:rtc_base_approved",
-    "../../rtc_base:rtc_task_queue",
-    "../../system_wrappers:system_wrappers",
-  ]
-}
-
-rtc_static_library("libjingle_peerconnection_jni") {
-  public_deps = [
-    ":audio_jni",
-    ":base_jni",
-    ":media_jni",
-    ":peerconnection_jni",
-    ":video_jni",
-    "../../pc:create_pc_factory",
-  ]
-}
-
-generate_jni("generated_metrics_jni") {
-  sources = [
-    "api/org/webrtc/Metrics.java",
-  ]
-  jni_package = ""
-  jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
-}
-
-rtc_static_library("libjingle_peerconnection_metrics_default_jni") {
-  sources = [
-    "src/jni/androidmetrics_jni.cc",
-  ]
-
-  configs += [ ":libjingle_peerconnection_jni_warnings_config" ]
-
-  deps = [
-    ":base_jni",
-    ":generated_metrics_jni",
-    ":peerconnection_jni",
-    "../../pc:peerconnection",
-    "../../system_wrappers",
-    "../../system_wrappers:field_trial_default",
-    "../../system_wrappers:metrics_default",
-  ]
-}
-
-# The modular build targets can be used to build WebRTC with different
-# functionalities. The users can choose either the real implemenation or the
-# null implementation of the audio/video modules based on their requirments.
-rtc_shared_library("libjingle_peerconnection_datachannelonly_so") {
-  sources = [
-    "src/jni/jni_onload.cc",
-  ]
-
-  suppressed_configs += [ "//build/config/android:hide_all_but_jni_onload" ]
-  configs += [ "//build/config/android:hide_all_but_jni" ]
-
-  deps = [
-    ":base_jni",
-    ":libjingle_peerconnection_metrics_default_jni",
-    ":null_audio_jni",
-    ":null_media_jni",
-    ":null_video_jni",
-    ":peerconnection_jni",
-    "../../pc:peerconnection",
-    "../../rtc_base:rtc_base",
-    "../../rtc_base:rtc_base_approved",
-  ]
-  output_extension = "so"
-}
-
-rtc_shared_library("libjingle_peerconnection_so") {
-  sources = [
-    "src/jni/jni_onload.cc",
-  ]
-
-  suppressed_configs += [ "//build/config/android:hide_all_but_jni_onload" ]
-  configs += [ "//build/config/android:hide_all_but_jni" ]
-
-  deps = [
-    ":libjingle_peerconnection_jni",
-    ":libjingle_peerconnection_metrics_default_jni",
-    "../../pc:libjingle_peerconnection",
-    "../../rtc_base:rtc_base",
-  ]
-  output_extension = "so"
-}
-
-template("java8_dist_jar") {
-  _deps = []
-  _output_jars = []
-  foreach(dep, invoker.deps) {
-    _dep_name = get_label_info(dep, "name")
-    _source_jar = get_label_info(dep, "target_gen_dir") + "/" + _dep_name +
-                  "__compile_java.javac.jar"
-    _output_jar = "$target_gen_dir/" + _dep_name + ".jar"
-
-    _copy_dep = dep + "__compile_java__javac"
-    _copy_target_name = "${target_name}_${_dep_name}_copy"
-
-    copy(_copy_target_name) {
-      sources = [
-        _source_jar,
-      ]
-      outputs = [
-        _output_jar,
-      ]
-      deps = [
-        _copy_dep,
-      ]
-    }
-    _deps += [ ":" + _copy_target_name ]
-    _output_jars += [ _output_jar ]
-  }
-
-  action(target_name) {
-    forward_variables_from(invoker, [ "data" ])
-    script = "//build/android/gyp/create_dist_jar.py"
-    depfile = "$target_gen_dir/$target_name.d"
-    deps = _deps
-
-    outputs = [
-      invoker.output,
-    ]
-
-    args = [
-      "--depfile",
-      rebase_path(depfile, root_build_dir),
-      "--output",
-      rebase_path(invoker.output, root_build_dir),
-    ]
-
-    foreach(output_jar, _output_jars) {
-      args += [
-        "--inputs",
-        rebase_path(output_jar, root_build_dir),
-      ]
-    }
-  }
-}
-
-java8_dist_jar("libwebrtc") {
-  _target_dir_name = get_label_info(":$target_name", "dir")
-  output = "${root_out_dir}/lib.java${_target_dir_name}/${target_name}.jar"
-
-  deps = [
-    ":libjingle_peerconnection_java",
-    ":libjingle_peerconnection_metrics_default_java",
-    "../../modules/audio_device:audio_device_java",
-    "../../rtc_base:base_java",
-  ]
-}
-
-rtc_android_library("libjingle_peerconnection_java") {
-  java_files = [
-    "src/java/org/webrtc/VideoDecoderWrapper.java",
-    "api/org/webrtc/AudioProcessingFactory.java",
-    "api/org/webrtc/AudioSource.java",
-    "api/org/webrtc/AudioTrack.java",
-    "api/org/webrtc/CallSessionFileRotatingLogSink.java",
-    "api/org/webrtc/Camera1Capturer.java",
-    "api/org/webrtc/Camera1Enumerator.java",
-    "api/org/webrtc/Camera2Capturer.java",
-    "api/org/webrtc/Camera2Enumerator.java",
-    "api/org/webrtc/CameraEnumerationAndroid.java",
-    "api/org/webrtc/CameraEnumerator.java",
-    "api/org/webrtc/CameraVideoCapturer.java",
-    "api/org/webrtc/DataChannel.java",
-    "api/org/webrtc/DefaultAudioProcessingFactory.java",
-    "api/org/webrtc/DefaultVideoDecoderFactory.java",
-    "api/org/webrtc/DefaultVideoEncoderFactory.java",
-    "api/org/webrtc/DtmfSender.java",
-    "api/org/webrtc/EglBase.java",
-    "api/org/webrtc/EglRenderer.java",
-    "api/org/webrtc/EncodedImage.java",
-    "api/org/webrtc/FileVideoCapturer.java",
-    "api/org/webrtc/GlRectDrawer.java",
-    "api/org/webrtc/GlShader.java",
-    "api/org/webrtc/GlTextureFrameBuffer.java",
-    "api/org/webrtc/GlUtil.java",
-    "api/org/webrtc/HardwareVideoDecoderFactory.java",
-    "api/org/webrtc/HardwareVideoEncoderFactory.java",
-    "api/org/webrtc/IceCandidate.java",
-    "api/org/webrtc/JavaI420Buffer.java",
-    "api/org/webrtc/MediaCodecVideoDecoder.java",
-    "api/org/webrtc/MediaCodecVideoEncoder.java",
-    "api/org/webrtc/MediaConstraints.java",
-    "api/org/webrtc/MediaSource.java",
-    "api/org/webrtc/MediaStream.java",
-    "api/org/webrtc/MediaStreamTrack.java",
-    "api/org/webrtc/NativeLibraryLoader.java",
-    "api/org/webrtc/NetworkMonitor.java",
-    "api/org/webrtc/NetworkMonitorAutoDetect.java",
-    "api/org/webrtc/PeerConnection.java",
-    "api/org/webrtc/PeerConnectionFactory.java",
-    "api/org/webrtc/PostProcessingFactory.java",
-    "api/org/webrtc/RendererCommon.java",
-    "api/org/webrtc/RTCStats.java",
-    "api/org/webrtc/RTCStatsCollectorCallback.java",
-    "api/org/webrtc/RTCStatsReport.java",
-    "api/org/webrtc/RtpParameters.java",
-    "api/org/webrtc/RtpReceiver.java",
-    "api/org/webrtc/RtpSender.java",
-    "api/org/webrtc/ScreenCapturerAndroid.java",
-    "api/org/webrtc/SdpObserver.java",
-    "api/org/webrtc/SessionDescription.java",
-    "api/org/webrtc/SoftwareVideoDecoderFactory.java",
-    "api/org/webrtc/SoftwareVideoEncoderFactory.java",
-    "api/org/webrtc/StatsObserver.java",
-    "api/org/webrtc/StatsReport.java",
-    "api/org/webrtc/SurfaceTextureHelper.java",
-    "api/org/webrtc/SurfaceEglRenderer.java",
-    "api/org/webrtc/SurfaceViewRenderer.java",
-    "api/org/webrtc/TurnCustomizer.java",
-    "api/org/webrtc/VideoCapturer.java",
-    "api/org/webrtc/VideoCodecInfo.java",
-    "api/org/webrtc/VideoCodecStatus.java",
-    "api/org/webrtc/VideoDecoder.java",
-    "api/org/webrtc/VideoDecoderFactory.java",
-    "api/org/webrtc/VideoDecoderFallback.java",
-    "api/org/webrtc/VideoEncoder.java",
-    "api/org/webrtc/VideoEncoderFactory.java",
-    "api/org/webrtc/VideoEncoderFallback.java",
-    "api/org/webrtc/VideoFileRenderer.java",
-    "api/org/webrtc/VideoFrame.java",
-    "api/org/webrtc/VideoFrameDrawer.java",
-    "api/org/webrtc/VideoRenderer.java",
-    "api/org/webrtc/VideoSink.java",
-    "api/org/webrtc/VideoSource.java",
-    "api/org/webrtc/VideoTrack.java",
-    "api/org/webrtc/YuvConverter.java",
-    "api/org/webrtc/YuvHelper.java",
-    "src/java/org/webrtc/AndroidVideoTrackSourceObserver.java",
-    "src/java/org/webrtc/BaseBitrateAdjuster.java",
-    "src/java/org/webrtc/BitrateAdjuster.java",
-    "src/java/org/webrtc/CalledByNative.java",
-    "src/java/org/webrtc/Camera1Session.java",
-    "src/java/org/webrtc/Camera2Session.java",
-    "src/java/org/webrtc/CameraCapturer.java",
-    "src/java/org/webrtc/CameraSession.java",
-    "src/java/org/webrtc/WebRtcClassLoader.java",
-    "src/java/org/webrtc/DynamicBitrateAdjuster.java",
-    "src/java/org/webrtc/EglBase10.java",
-    "src/java/org/webrtc/EglBase14.java",
-    "src/java/org/webrtc/FramerateBitrateAdjuster.java",
-    "src/java/org/webrtc/HardwareVideoDecoder.java",
-    "src/java/org/webrtc/HardwareVideoEncoder.java",
-    "src/java/org/webrtc/Histogram.java",
-    "src/java/org/webrtc/JniCommon.java",
-    "src/java/org/webrtc/MediaCodecUtils.java",
-    "src/java/org/webrtc/NativeClassQualifiedName.java",
-    "src/java/org/webrtc/NativeLibrary.java",
-    "src/java/org/webrtc/NV12Buffer.java",
-    "src/java/org/webrtc/NV21Buffer.java",
-    "src/java/org/webrtc/TextureBufferImpl.java",
-    "src/java/org/webrtc/VideoCodecType.java",
-    "src/java/org/webrtc/VideoEncoderWrapper.java",
-    "src/java/org/webrtc/WrappedNativeI420Buffer.java",
-    "src/java/org/webrtc/WrappedNativeVideoEncoder.java",
-    "src/java/org/webrtc/WrappedNativeVideoDecoder.java",
-    "src/java/org/webrtc/VP8Encoder.java",
-    "src/java/org/webrtc/VP8Decoder.java",
-    "src/java/org/webrtc/VP9Encoder.java",
-    "src/java/org/webrtc/VP9Decoder.java",
-  ]
-
-  deps = [
-    "../../modules/audio_device:audio_device_java",
-    "../../rtc_base:base_java",
-  ]
-}
-
-rtc_android_library("libjingle_peerconnection_metrics_default_java") {
-  java_files = [ "api/org/webrtc/Metrics.java" ]
-
-  deps = [
-    ":libjingle_peerconnection_java",
-    "../../rtc_base:base_java",
-  ]
-}
-
-if (rtc_include_tests) {
-  rtc_instrumentation_test_apk("libjingle_peerconnection_android_unittest") {
-    apk_name = "libjingle_peerconnection_android_unittest"
-    android_manifest = "instrumentationtests/AndroidManifest.xml"
-
-    java_files = [
-      "instrumentationtests/src/org/webrtc/Camera1CapturerUsingByteBufferTest.java",
-      "instrumentationtests/src/org/webrtc/Camera1CapturerUsingTextureTest.java",
-      "instrumentationtests/src/org/webrtc/Camera2CapturerTest.java",
-      "instrumentationtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java",
-      "instrumentationtests/src/org/webrtc/DefaultAudioProcessingFactoryTest.java",
-      "instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java",
-      "instrumentationtests/src/org/webrtc/EglRendererTest.java",
-      "instrumentationtests/src/org/webrtc/FileVideoCapturerTest.java",
-      "instrumentationtests/src/org/webrtc/GlRectDrawerTest.java",
-      "instrumentationtests/src/org/webrtc/HardwareVideoDecoderTest.java",
-      "instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java",
-      "instrumentationtests/src/org/webrtc/MediaCodecVideoEncoderTest.java",
-      "instrumentationtests/src/org/webrtc/NetworkMonitorTest.java",
-      "instrumentationtests/src/org/webrtc/PeerConnectionFactoryTest.java",
-      "instrumentationtests/src/org/webrtc/PeerConnectionTest.java",
-      "instrumentationtests/src/org/webrtc/RendererCommonTest.java",
-      "instrumentationtests/src/org/webrtc/SurfaceTextureHelperTest.java",
-      "instrumentationtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java",
-      "instrumentationtests/src/org/webrtc/VideoFileRendererTest.java",
-      "instrumentationtests/src/org/webrtc/WebRtcJniBootTest.java",
-      "instrumentationtests/src/org/webrtc/YuvHelperTest.java",
-    ]
-
-    data = [
-      "../../sdk/android/instrumentationtests/src/org/webrtc/capturetestvideo.y4m",
-    ]
-
-    deps = [
-      "../../rtc_base:base_java",
-      "../../sdk/android:libjingle_peerconnection_java",
-      "../../sdk/android:libjingle_peerconnection_metrics_default_java",
-      "//base:base_java",
-      "//base:base_java_test_support",
-      "//third_party/android_support_test_runner:rules_java",
-      "//third_party/android_support_test_runner:runner_java",
-      "//third_party/junit",
-    ]
-
-    shared_libraries = [ "../../sdk/android:libjingle_peerconnection_so" ]
-  }
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/OWNERS
+++ /dev/null
@@ -1,3 +0,0 @@
-glaznev@webrtc.org
-magjed@webrtc.org
-sakal@webrtc.org
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/PRESUBMIT.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS.  All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-def CheckChangeOnUpload(input_api, output_api):
-  results = []
-  results.extend(CheckPatchFormatted(input_api, output_api))
-  return results
-
-def CheckPatchFormatted(input_api, output_api):
-  import git_cl
-  cmd = ['cl', 'format', '--dry-run', input_api.PresubmitLocalPath()]
-  code, _ = git_cl.RunGitWithCode(cmd, suppress_stderr=True)
-  if code == 2:
-    short_path = input_api.basename(input_api.PresubmitLocalPath())
-    full_path = input_api.os_path.relpath(input_api.PresubmitLocalPath(),
-                                          input_api.change.RepositoryRoot())
-    return [output_api.PresubmitPromptWarning(
-      'The %s directory requires source formatting. '
-      'Please run git cl format %s' %
-      (short_path, full_path))]
-  # As this is just a warning, ignore all other errors if the user
-  # happens to have a broken clang-format, doesn't use git, etc etc.
-  return []
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/README
+++ /dev/null
@@ -1,11 +0,0 @@
-This directory holds a Java implementation of the webrtc::PeerConnection API, as
-well as the JNI glue C++ code that lets the Java implementation reuse the C++
-implementation of the same API.
-
-To build the Java API and related tests, generate GN projects with:
---args='target_os="android"'
-
-To use the Java API, start by looking at the public interface of
-org.webrtc.PeerConnection{,Factory} and the org.webrtc.PeerConnectionTest.
-
-To understand the implementation of the API, see the native code in jni/.
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/AudioProcessingFactory.java
+++ /dev/null
@@ -1,20 +0,0 @@
-/*
- *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-/** Factory for creating webrtc::AudioProcessing instances. */
-public interface AudioProcessingFactory {
-  /**
-   * Dynamically allocates a webrtc::AudioProcessing instance and returns a pointer to it.
-   * The caller takes ownership of the object.
-   */
-  public long createNative();
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/AudioSource.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- *  Copyright 2013 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-/**
- * Java wrapper for a C++ AudioSourceInterface.  Used as the source for one or
- * more {@code AudioTrack} objects.
- */
-public class AudioSource extends MediaSource {
-  public AudioSource(long nativeSource) {
-    super(nativeSource);
-  }
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/AudioTrack.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- *  Copyright 2013 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-/** Java wrapper for a C++ AudioTrackInterface */
-public class AudioTrack extends MediaStreamTrack {
-  public AudioTrack(long nativeTrack) {
-    super(nativeTrack);
-  }
-
-  /** Sets the volume for the underlying MediaSource. Volume is a gain value in the range
-   *  0 to 10.
-   */
-  public void setVolume(double volume) {
-    nativeSetVolume(super.nativeTrack, volume);
-  }
-
-  private static native void nativeSetVolume(long nativeTrack, double volume);
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/CallSessionFileRotatingLogSink.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-public class CallSessionFileRotatingLogSink {
-  private long nativeSink;
-
-  public static byte[] getLogData(String dirPath) {
-    return nativeGetLogData(dirPath);
-  }
-
-  public CallSessionFileRotatingLogSink(
-      String dirPath, int maxFileSize, Logging.Severity severity) {
-    nativeSink = nativeAddSink(dirPath, maxFileSize, severity.ordinal());
-  }
-
-  public void dispose() {
-    if (nativeSink != 0) {
-      nativeDeleteSink(nativeSink);
-      nativeSink = 0;
-    }
-  }
-
-  private static native long nativeAddSink(String dirPath, int maxFileSize, int severity);
-  private static native void nativeDeleteSink(long nativeSink);
-  private static native byte[] nativeGetLogData(String dirPath);
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/Camera1Capturer.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-import android.content.Context;
-import android.media.MediaRecorder;
-
-public class Camera1Capturer extends CameraCapturer {
-  private final boolean captureToTexture;
-
-  public Camera1Capturer(
-      String cameraName, CameraEventsHandler eventsHandler, boolean captureToTexture) {
-    super(cameraName, eventsHandler, new Camera1Enumerator(captureToTexture));
-
-    this.captureToTexture = captureToTexture;
-  }
-
-  @Override
-  protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
-      CameraSession.Events events, Context applicationContext,
-      SurfaceTextureHelper surfaceTextureHelper, MediaRecorder mediaRecorder, String cameraName,
-      int width, int height, int framerate) {
-    Camera1Session.create(createSessionCallback, events,
-        captureToTexture || (mediaRecorder != null), applicationContext, surfaceTextureHelper,
-        mediaRecorder, Camera1Enumerator.getCameraIndex(cameraName), width, height, framerate);
-  }
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/Camera1Enumerator.java
+++ /dev/null
@@ -1,184 +0,0 @@
-/*
- *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-import android.os.SystemClock;
-import java.util.ArrayList;
-import java.util.List;
-import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
-
-@SuppressWarnings("deprecation")
-public class Camera1Enumerator implements CameraEnumerator {
-  private final static String TAG = "Camera1Enumerator";
-  // Each entry contains the supported formats for corresponding camera index. The formats for all
-  // cameras are enumerated on the first call to getSupportedFormats(), and cached for future
-  // reference.
-  private static List<List<CaptureFormat>> cachedSupportedFormats;
-
-  private final boolean captureToTexture;
-
-  public Camera1Enumerator() {
-    this(true /* captureToTexture */);
-  }
-
-  public Camera1Enumerator(boolean captureToTexture) {
-    this.captureToTexture = captureToTexture;
-  }
-
-  // Returns device names that can be used to create a new VideoCapturerAndroid.
-  @Override
-  public String[] getDeviceNames() {
-    ArrayList<String> namesList = new ArrayList<>();
-    for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
-      String name = getDeviceName(i);
-      if (name != null) {
-        namesList.add(name);
-        Logging.d(TAG, "Index: " + i + ". " + name);
-      } else {
-        Logging.e(TAG, "Index: " + i + ". Failed to query camera name.");
-      }
-    }
-    String[] namesArray = new String[namesList.size()];
-    return namesList.toArray(namesArray);
-  }
-
-  @Override
-  public boolean isFrontFacing(String deviceName) {
-    android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
-    return info != null && info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT;
-  }
-
-  @Override
-  public boolean isBackFacing(String deviceName) {
-    android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
-    return info != null && info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK;
-  }
-
-  @Override
-  public List<CaptureFormat> getSupportedFormats(String deviceName) {
-    return getSupportedFormats(getCameraIndex(deviceName));
-  }
-
-  @Override
-  public CameraVideoCapturer createCapturer(
-      String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
-    return new Camera1Capturer(deviceName, eventsHandler, captureToTexture);
-  }
-
-  private static android.hardware.Camera.CameraInfo getCameraInfo(int index) {
-    android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
-    try {
-      android.hardware.Camera.getCameraInfo(index, info);
-    } catch (Exception e) {
-      Logging.e(TAG, "getCameraInfo failed on index " + index, e);
-      return null;
-    }
-    return info;
-  }
-
-  static synchronized List<CaptureFormat> getSupportedFormats(int cameraId) {
-    if (cachedSupportedFormats == null) {
-      cachedSupportedFormats = new ArrayList<List<CaptureFormat>>();
-      for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
-        cachedSupportedFormats.add(enumerateFormats(i));
-      }
-    }
-    return cachedSupportedFormats.get(cameraId);
-  }
-
-  private static List<CaptureFormat> enumerateFormats(int cameraId) {
-    Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
-    final long startTimeMs = SystemClock.elapsedRealtime();
-    final android.hardware.Camera.Parameters parameters;
-    android.hardware.Camera camera = null;
-    try {
-      Logging.d(TAG, "Opening camera with index " + cameraId);
-      camera = android.hardware.Camera.open(cameraId);
-      parameters = camera.getParameters();
-    } catch (RuntimeException e) {
-      Logging.e(TAG, "Open camera failed on camera index " + cameraId, e);
-      return new ArrayList<CaptureFormat>();
-    } finally {
-      if (camera != null) {
-        camera.release();
-      }
-    }
-
-    final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
-    try {
-      int minFps = 0;
-      int maxFps = 0;
-      final List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
-      if (listFpsRange != null) {
-        // getSupportedPreviewFpsRange() returns a sorted list. Take the fps range
-        // corresponding to the highest fps.
-        final int[] range = listFpsRange.get(listFpsRange.size() - 1);
-        minFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
-        maxFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
-      }
-      for (android.hardware.Camera.Size size : parameters.getSupportedPreviewSizes()) {
-        formatList.add(new CaptureFormat(size.width, size.height, minFps, maxFps));
-      }
-    } catch (Exception e) {
-      Logging.e(TAG, "getSupportedFormats() failed on camera index " + cameraId, e);
-    }
-
-    final long endTimeMs = SystemClock.elapsedRealtime();
-    Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
-            + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
-    return formatList;
-  }
-
-  // Convert from android.hardware.Camera.Size to Size.
-  static List<Size> convertSizes(List<android.hardware.Camera.Size> cameraSizes) {
-    final List<Size> sizes = new ArrayList<Size>();
-    for (android.hardware.Camera.Size size : cameraSizes) {
-      sizes.add(new Size(size.width, size.height));
-    }
-    return sizes;
-  }
-
-  // Convert from int[2] to CaptureFormat.FramerateRange.
-  static List<CaptureFormat.FramerateRange> convertFramerates(List<int[]> arrayRanges) {
-    final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
-    for (int[] range : arrayRanges) {
-      ranges.add(new CaptureFormat.FramerateRange(
-          range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
-          range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]));
-    }
-    return ranges;
-  }
-
-  // Returns the camera index for camera with name |deviceName|, or throws IllegalArgumentException
-  // if no such camera can be found.
-  static int getCameraIndex(String deviceName) {
-    Logging.d(TAG, "getCameraIndex: " + deviceName);
-    for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
-      if (deviceName.equals(getDeviceName(i))) {
-        return i;
-      }
-    }
-    throw new IllegalArgumentException("No such camera: " + deviceName);
-  }
-
-  // Returns the name of the camera with camera index. Returns null if the
-  // camera can not be used.
-  static String getDeviceName(int index) {
-    android.hardware.Camera.CameraInfo info = getCameraInfo(index);
-    if (info == null) {
-      return null;
-    }
-
-    String facing =
-        (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back";
-    return "Camera " + index + ", Facing " + facing + ", Orientation " + info.orientation;
-  }
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/Camera2Capturer.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-import android.annotation.TargetApi;
-import android.content.Context;
-import android.hardware.camera2.CameraManager;
-import android.media.MediaRecorder;
-
-@TargetApi(21)
-public class Camera2Capturer extends CameraCapturer {
-  private final Context context;
-  private final CameraManager cameraManager;
-
-  public Camera2Capturer(Context context, String cameraName, CameraEventsHandler eventsHandler) {
-    super(cameraName, eventsHandler, new Camera2Enumerator(context));
-
-    this.context = context;
-    cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
-  }
-
-  @Override
-  protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
-      CameraSession.Events events, Context applicationContext,
-      SurfaceTextureHelper surfaceTextureHelper, MediaRecorder mediaRecoder, String cameraName,
-      int width, int height, int framerate) {
-    Camera2Session.create(createSessionCallback, events, applicationContext, cameraManager,
-        surfaceTextureHelper, mediaRecoder, cameraName, width, height, framerate);
-  }
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/Camera2Enumerator.java
+++ /dev/null
@@ -1,248 +0,0 @@
-/*
- *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
-
-import android.annotation.TargetApi;
-import android.content.Context;
-import android.graphics.Rect;
-import android.graphics.SurfaceTexture;
-import android.hardware.camera2.CameraCharacteristics;
-import android.hardware.camera2.CameraManager;
-import android.hardware.camera2.CameraMetadata;
-import android.hardware.camera2.params.StreamConfigurationMap;
-import android.os.Build;
-import android.os.SystemClock;
-import android.util.AndroidException;
-import android.util.Range;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-@TargetApi(21)
-public class Camera2Enumerator implements CameraEnumerator {
-  private final static String TAG = "Camera2Enumerator";
-  private final static double NANO_SECONDS_PER_SECOND = 1.0e9;
-
-  // Each entry contains the supported formats for a given camera index. The formats are enumerated
-  // lazily in getSupportedFormats(), and cached for future reference.
-  private static final Map<String, List<CaptureFormat>> cachedSupportedFormats =
-      new HashMap<String, List<CaptureFormat>>();
-
-  final Context context;
-  final CameraManager cameraManager;
-
-  public Camera2Enumerator(Context context) {
-    this.context = context;
-    this.cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
-  }
-
-  @Override
-  public String[] getDeviceNames() {
-    try {
-      return cameraManager.getCameraIdList();
-      // On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
-      // catch statement with an Exception from a newer API, even if the code is never executed.
-      // https://code.google.com/p/android/issues/detail?id=209129
-    } catch (/* CameraAccessException */ AndroidException e) {
-      Logging.e(TAG, "Camera access exception: " + e);
-      return new String[] {};
-    }
-  }
-
-  @Override
-  public boolean isFrontFacing(String deviceName) {
-    CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
-
-    return characteristics != null
-        && characteristics.get(CameraCharacteristics.LENS_FACING)
-        == CameraMetadata.LENS_FACING_FRONT;
-  }
-
-  @Override
-  public boolean isBackFacing(String deviceName) {
-    CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
-
-    return characteristics != null
-        && characteristics.get(CameraCharacteristics.LENS_FACING)
-        == CameraMetadata.LENS_FACING_BACK;
-  }
-
-  @Override
-  public List<CaptureFormat> getSupportedFormats(String deviceName) {
-    return getSupportedFormats(context, deviceName);
-  }
-
-  @Override
-  public CameraVideoCapturer createCapturer(
-      String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
-    return new Camera2Capturer(context, deviceName, eventsHandler);
-  }
-
-  private CameraCharacteristics getCameraCharacteristics(String deviceName) {
-    try {
-      return cameraManager.getCameraCharacteristics(deviceName);
-      // On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
-      // catch statement with an Exception from a newer API, even if the code is never executed.
-      // https://code.google.com/p/android/issues/detail?id=209129
-    } catch (/* CameraAccessException */ AndroidException e) {
-      Logging.e(TAG, "Camera access exception: " + e);
-      return null;
-    }
-  }
-
-  /**
-   * Checks if API is supported and all cameras have better than legacy support.
-   */
-  public static boolean isSupported(Context context) {
-    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
-      return false;
-    }
-
-    CameraManager cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
-    try {
-      String[] cameraIds = cameraManager.getCameraIdList();
-      for (String id : cameraIds) {
-        CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(id);
-        if (characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)
-            == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
-          return false;
-        }
-      }
-      // On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
-      // catch statement with an Exception from a newer API, even if the code is never executed.
-      // https://code.google.com/p/android/issues/detail?id=209129
-    } catch (/* CameraAccessException */ AndroidException e) {
-      Logging.e(TAG, "Camera access exception: " + e);
-      return false;
-    }
-    return true;
-  }
-
-  static int getFpsUnitFactor(Range<Integer>[] fpsRanges) {
-    if (fpsRanges.length == 0) {
-      return 1000;
-    }
-    return fpsRanges[0].getUpper() < 1000 ? 1000 : 1;
-  }
-
-  static List<Size> getSupportedSizes(CameraCharacteristics cameraCharacteristics) {
-    final StreamConfigurationMap streamMap =
-        cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
-    final int supportLevel =
-        cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
-
-    final android.util.Size[] nativeSizes = streamMap.getOutputSizes(SurfaceTexture.class);
-    final List<Size> sizes = convertSizes(nativeSizes);
-
-    // Video may be stretched pre LMR1 on legacy implementations.
-    // Filter out formats that have different aspect ratio than the sensor array.
-    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP_MR1
-        && supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
-      final Rect activeArraySize =
-          cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
-      final ArrayList<Size> filteredSizes = new ArrayList<Size>();
-
-      for (Size size : sizes) {
-        if (activeArraySize.width() * size.height == activeArraySize.height() * size.width) {
-          filteredSizes.add(size);
-        }
-      }
-
-      return filteredSizes;
-    } else {
-      return sizes;
-    }
-  }
-
-  static List<CaptureFormat> getSupportedFormats(Context context, String cameraId) {
-    return getSupportedFormats(
-        (CameraManager) context.getSystemService(Context.CAMERA_SERVICE), cameraId);
-  }
-
-  static List<CaptureFormat> getSupportedFormats(CameraManager cameraManager, String cameraId) {
-    synchronized (cachedSupportedFormats) {
-      if (cachedSupportedFormats.containsKey(cameraId)) {
-        return cachedSupportedFormats.get(cameraId);
-      }
-
-      Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
-      final long startTimeMs = SystemClock.elapsedRealtime();
-
-      final CameraCharacteristics cameraCharacteristics;
-      try {
-        cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
-      } catch (Exception ex) {
-        Logging.e(TAG, "getCameraCharacteristics(): " + ex);
-        return new ArrayList<CaptureFormat>();
-      }
-
-      final StreamConfigurationMap streamMap =
-          cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
-
-      Range<Integer>[] fpsRanges =
-          cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
-      List<CaptureFormat.FramerateRange> framerateRanges =
-          convertFramerates(fpsRanges, getFpsUnitFactor(fpsRanges));
-      List<Size> sizes = getSupportedSizes(cameraCharacteristics);
-
-      int defaultMaxFps = 0;
-      for (CaptureFormat.FramerateRange framerateRange : framerateRanges) {
-        defaultMaxFps = Math.max(defaultMaxFps, framerateRange.max);
-      }
-
-      final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
-      for (Size size : sizes) {
-        long minFrameDurationNs = 0;
-        try {
-          minFrameDurationNs = streamMap.getOutputMinFrameDuration(
-              SurfaceTexture.class, new android.util.Size(size.width, size.height));
-        } catch (Exception e) {
-          // getOutputMinFrameDuration() is not supported on all devices. Ignore silently.
-        }
-        final int maxFps = (minFrameDurationNs == 0)
-            ? defaultMaxFps
-            : (int) Math.round(NANO_SECONDS_PER_SECOND / minFrameDurationNs) * 1000;
-        formatList.add(new CaptureFormat(size.width, size.height, 0, maxFps));
-        Logging.d(TAG, "Format: " + size.width + "x" + size.height + "@" + maxFps);
-      }
-
-      cachedSupportedFormats.put(cameraId, formatList);
-      final long endTimeMs = SystemClock.elapsedRealtime();
-      Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
-              + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
-      return formatList;
-    }
-  }
-
-  // Convert from android.util.Size to Size.
-  private static List<Size> convertSizes(android.util.Size[] cameraSizes) {
-    final List<Size> sizes = new ArrayList<Size>();
-    for (android.util.Size size : cameraSizes) {
-      sizes.add(new Size(size.getWidth(), size.getHeight()));
-    }
-    return sizes;
-  }
-
-  // Convert from android.util.Range<Integer> to CaptureFormat.FramerateRange.
-  static List<CaptureFormat.FramerateRange> convertFramerates(
-      Range<Integer>[] arrayRanges, int unitFactor) {
-    final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
-    for (Range<Integer> range : arrayRanges) {
-      ranges.add(new CaptureFormat.FramerateRange(
-          range.getLower() * unitFactor, range.getUpper() * unitFactor));
-    }
-    return ranges;
-  }
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/CameraEnumerationAndroid.java
+++ /dev/null
@@ -1,206 +0,0 @@
-/*
- *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-import static java.lang.Math.abs;
-
-import android.graphics.ImageFormat;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.List;
-
-@SuppressWarnings("deprecation")
-public class CameraEnumerationAndroid {
-  private final static String TAG = "CameraEnumerationAndroid";
-
-  static final ArrayList<Size> COMMON_RESOLUTIONS = new ArrayList<Size>(Arrays.asList(
-      // 0, Unknown resolution
-      new Size(160, 120), // 1, QQVGA
-      new Size(240, 160), // 2, HQVGA
-      new Size(320, 240), // 3, QVGA
-      new Size(400, 240), // 4, WQVGA
-      new Size(480, 320), // 5, HVGA
-      new Size(640, 360), // 6, nHD
-      new Size(640, 480), // 7, VGA
-      new Size(768, 480), // 8, WVGA
-      new Size(854, 480), // 9, FWVGA
-      new Size(800, 600), // 10, SVGA
-      new Size(960, 540), // 11, qHD
-      new Size(960, 640), // 12, DVGA
-      new Size(1024, 576), // 13, WSVGA
-      new Size(1024, 600), // 14, WVSGA
-      new Size(1280, 720), // 15, HD
-      new Size(1280, 1024), // 16, SXGA
-      new Size(1920, 1080), // 17, Full HD
-      new Size(1920, 1440), // 18, Full HD 4:3
-      new Size(2560, 1440), // 19, QHD
-      new Size(3840, 2160) // 20, UHD
-      ));
-
-  public static class CaptureFormat {
-    // Class to represent a framerate range. The framerate varies because of lightning conditions.
-    // The values are multiplied by 1000, so 1000 represents one frame per second.
-    public static class FramerateRange {
-      public int min;
-      public int max;
-
-      public FramerateRange(int min, int max) {
-        this.min = min;
-        this.max = max;
-      }
-
-      @Override
-      public String toString() {
-        return "[" + (min / 1000.0f) + ":" + (max / 1000.0f) + "]";
-      }
-
-      @Override
-      public boolean equals(Object other) {
-        if (!(other instanceof FramerateRange)) {
-          return false;
-        }
-        final FramerateRange otherFramerate = (FramerateRange) other;
-        return min == otherFramerate.min && max == otherFramerate.max;
-      }
-
-      @Override
-      public int hashCode() {
-        // Use prime close to 2^16 to avoid collisions for normal values less than 2^16.
-        return 1 + 65537 * min + max;
-      }
-    }
-
-    public final int width;
-    public final int height;
-    public final FramerateRange framerate;
-
-    // TODO(hbos): If VideoCapturer.startCapture is updated to support other image formats then this
-    // needs to be updated and VideoCapturer.getSupportedFormats need to return CaptureFormats of
-    // all imageFormats.
-    public final int imageFormat = ImageFormat.NV21;
-
-    public CaptureFormat(int width, int height, int minFramerate, int maxFramerate) {
-      this.width = width;
-      this.height = height;
-      this.framerate = new FramerateRange(minFramerate, maxFramerate);
-    }
-
-    public CaptureFormat(int width, int height, FramerateRange framerate) {
-      this.width = width;
-      this.height = height;
-      this.framerate = framerate;
-    }
-
-    // Calculates the frame size of this capture format.
-    public int frameSize() {
-      return frameSize(width, height, imageFormat);
-    }
-
-    // Calculates the frame size of the specified image format. Currently only
-    // supporting ImageFormat.NV21.
-    // The size is width * height * number of bytes per pixel.
-    // http://developer.android.com/reference/android/hardware/Camera.html#addCallbackBuffer(byte[])
-    public static int frameSize(int width, int height, int imageFormat) {
-      if (imageFormat != ImageFormat.NV21) {
-        throw new UnsupportedOperationException("Don't know how to calculate "
-            + "the frame size of non-NV21 image formats.");
-      }
-      return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
-    }
-
-    @Override
-    public String toString() {
-      return width + "x" + height + "@" + framerate;
-    }
-
-    @Override
-    public boolean equals(Object other) {
-      if (!(other instanceof CaptureFormat)) {
-        return false;
-      }
-      final CaptureFormat otherFormat = (CaptureFormat) other;
-      return width == otherFormat.width && height == otherFormat.height
-          && framerate.equals(otherFormat.framerate);
-    }
-
-    @Override
-    public int hashCode() {
-      return 1 + (width * 65497 + height) * 251 + framerate.hashCode();
-    }
-  }
-
-  // Helper class for finding the closest supported format for the two functions below. It creates a
-  // comparator based on the difference to some requested parameters, where the element with the
-  // minimum difference is the element that is closest to the requested parameters.
-  private static abstract class ClosestComparator<T> implements Comparator<T> {
-    // Difference between supported and requested parameter.
-    abstract int diff(T supportedParameter);
-
-    @Override
-    public int compare(T t1, T t2) {
-      return diff(t1) - diff(t2);
-    }
-  }
-
-  // Prefer a fps range with an upper bound close to |framerate|. Also prefer a fps range with a low
-  // lower bound, to allow the framerate to fluctuate based on lightning conditions.
-  public static CaptureFormat.FramerateRange getClosestSupportedFramerateRange(
-      List<CaptureFormat.FramerateRange> supportedFramerates, final int requestedFps) {
-    return Collections.min(
-        supportedFramerates, new ClosestComparator<CaptureFormat.FramerateRange>() {
-          // Progressive penalty if the upper bound is further away than |MAX_FPS_DIFF_THRESHOLD|
-          // from requested.
-          private static final int MAX_FPS_DIFF_THRESHOLD = 5000;
-          private static final int MAX_FPS_LOW_DIFF_WEIGHT = 1;
-          private static final int MAX_FPS_HIGH_DIFF_WEIGHT = 3;
-
-          // Progressive penalty if the lower bound is bigger than |MIN_FPS_THRESHOLD|.
-          private static final int MIN_FPS_THRESHOLD = 8000;
-          private static final int MIN_FPS_LOW_VALUE_WEIGHT = 1;
-          private static final int MIN_FPS_HIGH_VALUE_WEIGHT = 4;
-
-          // Use one weight for small |value| less than |threshold|, and another weight above.
-          private int progressivePenalty(int value, int threshold, int lowWeight, int highWeight) {
-            return (value < threshold) ? value * lowWeight
-                                       : threshold * lowWeight + (value - threshold) * highWeight;
-          }
-
-          @Override
-          int diff(CaptureFormat.FramerateRange range) {
-            final int minFpsError = progressivePenalty(
-                range.min, MIN_FPS_THRESHOLD, MIN_FPS_LOW_VALUE_WEIGHT, MIN_FPS_HIGH_VALUE_WEIGHT);
-            final int maxFpsError = progressivePenalty(Math.abs(requestedFps * 1000 - range.max),
-                MAX_FPS_DIFF_THRESHOLD, MAX_FPS_LOW_DIFF_WEIGHT, MAX_FPS_HIGH_DIFF_WEIGHT);
-            return minFpsError + maxFpsError;
-          }
-        });
-  }
-
-  public static Size getClosestSupportedSize(
-      List<Size> supportedSizes, final int requestedWidth, final int requestedHeight) {
-    return Collections.min(supportedSizes, new ClosestComparator<Size>() {
-      @Override
-      int diff(Size size) {
-        return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
-      }
-    });
-  }
-
-  // Helper method for camera classes.
-  static void reportCameraResolution(Histogram histogram, Size resolution) {
-    int index = COMMON_RESOLUTIONS.indexOf(resolution);
-    // 0 is reserved for unknown resolution, so add 1.
-    // indexOf returns -1 for unknown resolutions so it becomes 0 automatically.
-    histogram.addSample(index + 1);
-  }
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/CameraEnumerator.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
-
-import java.util.List;
-
-public interface CameraEnumerator {
-  public String[] getDeviceNames();
-  public boolean isFrontFacing(String deviceName);
-  public boolean isBackFacing(String deviceName);
-  public List<CaptureFormat> getSupportedFormats(String deviceName);
-
-  public CameraVideoCapturer createCapturer(
-      String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler);
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/CameraVideoCapturer.java
+++ /dev/null
@@ -1,158 +0,0 @@
-/*
- *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-import android.media.MediaRecorder;
-
-/**
- * Base interface for camera1 and camera2 implementations. Extends VideoCapturer with a
- * switchCamera() function. Also provides subinterfaces for handling camera events, and a helper
- * class for detecting camera freezes.
- */
-public interface CameraVideoCapturer extends VideoCapturer {
-  /**
-   * Camera events handler - can be used to be notifed about camera events. The callbacks are
-   * executed from an arbitrary thread.
-   */
-  public interface CameraEventsHandler {
-    // Camera error handler - invoked when camera can not be opened
-    // or any camera exception happens on camera thread.
-    void onCameraError(String errorDescription);
-
-    // Called when camera is disconnected.
-    void onCameraDisconnected();
-
-    // Invoked when camera stops receiving frames.
-    void onCameraFreezed(String errorDescription);
-
-    // Callback invoked when camera is opening.
-    void onCameraOpening(String cameraName);
-
-    // Callback invoked when first camera frame is available after camera is started.
-    void onFirstFrameAvailable();
-
-    // Callback invoked when camera is closed.
-    void onCameraClosed();
-  }
-
-  /**
-   * Camera switch handler - one of these functions are invoked with the result of switchCamera().
-   * The callback may be called on an arbitrary thread.
-   */
-  public interface CameraSwitchHandler {
-    // Invoked on success. |isFrontCamera| is true if the new camera is front facing.
-    void onCameraSwitchDone(boolean isFrontCamera);
-
-    // Invoked on failure, e.g. camera is stopped or only one camera available.
-    void onCameraSwitchError(String errorDescription);
-  }
-
-  /**
-   * Switch camera to the next valid camera id. This can only be called while the camera is running.
-   * This function can be called from any thread.
-   */
-  void switchCamera(CameraSwitchHandler switchEventsHandler);
-
-  /**
-   * MediaRecorder add/remove handler - one of these functions are invoked with the result of
-   * addMediaRecorderToCamera() or removeMediaRecorderFromCamera calls.
-   * The callback may be called on an arbitrary thread.
-   */
-  public interface MediaRecorderHandler {
-    // Invoked on success.
-    void onMediaRecorderSuccess();
-
-    // Invoked on failure, e.g. camera is stopped or any exception happens.
-    void onMediaRecorderError(String errorDescription);
-  }
-
-  /**
-   * Add MediaRecorder to camera pipeline. This can only be called while the camera is running.
-   * Once MediaRecorder is added to camera pipeline camera switch is not allowed.
-   * This function can be called from any thread.
-   */
-  void addMediaRecorderToCamera(MediaRecorder mediaRecorder, MediaRecorderHandler resultHandler);
-
-  /**
-   * Remove MediaRecorder from camera pipeline. This can only be called while the camera is running.
-   * This function can be called from any thread.
-   */
-  void removeMediaRecorderFromCamera(MediaRecorderHandler resultHandler);
-
-  /**
-   * Helper class to log framerate and detect if the camera freezes. It will run periodic callbacks
-   * on the SurfaceTextureHelper thread passed in the ctor, and should only be operated from that
-   * thread.
-   */
-  public static class CameraStatistics {
-    private final static String TAG = "CameraStatistics";
-    private final static int CAMERA_OBSERVER_PERIOD_MS = 2000;
-    private final static int CAMERA_FREEZE_REPORT_TIMOUT_MS = 4000;
-
-    private final SurfaceTextureHelper surfaceTextureHelper;
-    private final CameraEventsHandler eventsHandler;
-    private int frameCount;
-    private int freezePeriodCount;
-    // Camera observer - monitors camera framerate. Observer is executed on camera thread.
-    private final Runnable cameraObserver = new Runnable() {
-      @Override
-      public void run() {
-        final int cameraFps = Math.round(frameCount * 1000.0f / CAMERA_OBSERVER_PERIOD_MS);
-        Logging.d(TAG, "Camera fps: " + cameraFps + ".");
-        if (frameCount == 0) {
-          ++freezePeriodCount;
-          if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount >= CAMERA_FREEZE_REPORT_TIMOUT_MS
-              && eventsHandler != null) {
-            Logging.e(TAG, "Camera freezed.");
-            if (surfaceTextureHelper.isTextureInUse()) {
-              // This can only happen if we are capturing to textures.
-              eventsHandler.onCameraFreezed("Camera failure. Client must return video buffers.");
-            } else {
-              eventsHandler.onCameraFreezed("Camera failure.");
-            }
-            return;
-          }
-        } else {
-          freezePeriodCount = 0;
-        }
-        frameCount = 0;
-        surfaceTextureHelper.getHandler().postDelayed(this, CAMERA_OBSERVER_PERIOD_MS);
-      }
-    };
-
-    public CameraStatistics(
-        SurfaceTextureHelper surfaceTextureHelper, CameraEventsHandler eventsHandler) {
-      if (surfaceTextureHelper == null) {
-        throw new IllegalArgumentException("SurfaceTextureHelper is null");
-      }
-      this.surfaceTextureHelper = surfaceTextureHelper;
-      this.eventsHandler = eventsHandler;
-      this.frameCount = 0;
-      this.freezePeriodCount = 0;
-      surfaceTextureHelper.getHandler().postDelayed(cameraObserver, CAMERA_OBSERVER_PERIOD_MS);
-    }
-
-    private void checkThread() {
-      if (Thread.currentThread() != surfaceTextureHelper.getHandler().getLooper().getThread()) {
-        throw new IllegalStateException("Wrong thread");
-      }
-    }
-
-    public void addFrame() {
-      checkThread();
-      ++frameCount;
-    }
-
-    public void release() {
-      surfaceTextureHelper.getHandler().removeCallbacks(cameraObserver);
-    }
-  }
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/DataChannel.java
+++ /dev/null
@@ -1,163 +0,0 @@
-/*
- *  Copyright 2013 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-import java.nio.ByteBuffer;
-
-/** Java wrapper for a C++ DataChannelInterface. */
-public class DataChannel {
-  /** Java wrapper for WebIDL RTCDataChannel. */
-  public static class Init {
-    public boolean ordered = true;
-    // Optional unsigned short in WebIDL, -1 means unspecified.
-    public int maxRetransmitTimeMs = -1;
-    // Optional unsigned short in WebIDL, -1 means unspecified.
-    public int maxRetransmits = -1;
-    public String protocol = "";
-    public boolean negotiated = false;
-    // Optional unsigned short in WebIDL, -1 means unspecified.
-    public int id = -1;
-
-    @CalledByNative("Init")
-    boolean getOrdered() {
-      return ordered;
-    }
-
-    @CalledByNative("Init")
-    int getMaxRetransmitTimeMs() {
-      return maxRetransmitTimeMs;
-    }
-
-    @CalledByNative("Init")
-    int getMaxRetransmits() {
-      return maxRetransmits;
-    }
-
-    @CalledByNative("Init")
-    String getProtocol() {
-      return protocol;
-    }
-
-    @CalledByNative("Init")
-    boolean getNegotiated() {
-      return negotiated;
-    }
-
-    @CalledByNative("Init")
-    int getId() {
-      return id;
-    }
-  }
-
-  /** Java version of C++ DataBuffer.  The atom of data in a DataChannel. */
-  public static class Buffer {
-    /** The underlying data. */
-    public final ByteBuffer data;
-
-    /**
-     * Indicates whether |data| contains UTF-8 text or "binary data"
-     * (i.e. anything else).
-     */
-    public final boolean binary;
-
-    @CalledByNative("Buffer")
-    public Buffer(ByteBuffer data, boolean binary) {
-      this.data = data;
-      this.binary = binary;
-    }
-  }
-
-  /** Java version of C++ DataChannelObserver. */
-  public interface Observer {
-    /** The data channel's bufferedAmount has changed. */
-    @CalledByNative("Observer") public void onBufferedAmountChange(long previousAmount);
-    /** The data channel state has changed. */
-    @CalledByNative("Observer") public void onStateChange();
-    /**
-     * A data buffer was successfully received.  NOTE: |buffer.data| will be
-     * freed once this function returns so callers who want to use the data
-     * asynchronously must make sure to copy it first.
-     */
-    @CalledByNative("Observer") public void onMessage(Buffer buffer);
-  }
-
-  /** Keep in sync with DataChannelInterface::DataState. */
-  public enum State {
-    CONNECTING,
-    OPEN,
-    CLOSING,
-    CLOSED;
-
-    @CalledByNative("State")
-    static State fromNativeIndex(int nativeIndex) {
-      return values()[nativeIndex];
-    }
-  }
-
-  private final long nativeDataChannel;
-  private long nativeObserver;
-
-  @CalledByNative
-  public DataChannel(long nativeDataChannel) {
-    this.nativeDataChannel = nativeDataChannel;
-  }
-
-  /** Register |observer|, replacing any previously-registered observer. */
-  public void registerObserver(Observer observer) {
-    if (nativeObserver != 0) {
-      unregisterObserverNative(nativeObserver);
-    }
-    nativeObserver = registerObserverNative(observer);
-  }
-  private native long registerObserverNative(Observer observer);
-
-  /** Unregister the (only) observer. */
-  public void unregisterObserver() {
-    unregisterObserverNative(nativeObserver);
-  }
-  private native void unregisterObserverNative(long nativeObserver);
-
-  public native String label();
-
-  public native int id();
-
-  public native State state();
-
-  /**
-   * Return the number of bytes of application data (UTF-8 text and binary data)
-   * that have been queued using SendBuffer but have not yet been transmitted
-   * to the network.
-   */
-  public native long bufferedAmount();
-
-  /** Close the channel. */
-  public native void close();
-
-  /** Send |data| to the remote peer; return success. */
-  public boolean send(Buffer buffer) {
-    // TODO(fischman): this could be cleverer about avoiding copies if the
-    // ByteBuffer is direct and/or is backed by an array.
-    byte[] data = new byte[buffer.data.remaining()];
-    buffer.data.get(data);
-    return sendNative(data, buffer.binary);
-  }
-  private native boolean sendNative(byte[] data, boolean binary);
-
-  /** Dispose of native resources attached to this channel. */
-  public void dispose() {
-    JniCommon.nativeReleaseRef(nativeDataChannel);
-  }
-
-  @CalledByNative
-  long getNativeDataChannel() {
-    return nativeDataChannel;
-  }
-};
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/DefaultAudioProcessingFactory.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-/** Factory for instantiating the default webrtc::AudioProcessing implementation. */
-public class DefaultAudioProcessingFactory implements AudioProcessingFactory {
-  public DefaultAudioProcessingFactory() {
-    this(null /* postProcessingFactory */);
-  }
-
-  /**
-   * Allows injecting a PostProcessingFactory. A null PostProcessingFactory creates a
-   * webrtc::AudioProcessing with nullptr webrtc::postProcessing.
-   */
-  public DefaultAudioProcessingFactory(PostProcessingFactory postProcessingFactory) {
-    this.postProcessingFactory = postProcessingFactory;
-  }
-
-  /**
-   * Creates a default webrtc::AudioProcessing module, which takes ownership of objects created by
-   * its factories.
-   */
-  @Override
-  public long createNative() {
-    long nativePostProcessor = 0;
-    if (postProcessingFactory != null) {
-      nativePostProcessor = postProcessingFactory.createNative();
-      if (nativePostProcessor == 0) {
-        throw new NullPointerException(
-            "PostProcessingFactory.createNative() may not return 0 (nullptr).");
-      }
-    }
-    return nativeCreateAudioProcessing(nativePostProcessor);
-  }
-
-  private PostProcessingFactory postProcessingFactory;
-
-  private static native long nativeCreateAudioProcessing(long nativePostProcessor);
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/DefaultVideoDecoderFactory.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-public class DefaultVideoDecoderFactory implements VideoDecoderFactory {
-  private final HardwareVideoDecoderFactory hardwareVideoDecoderFactory;
-  private final SoftwareVideoDecoderFactory softwareVideoDecoderFactory;
-
-  public DefaultVideoDecoderFactory(EglBase.Context eglContext) {
-    hardwareVideoDecoderFactory =
-        new HardwareVideoDecoderFactory(eglContext, false /* fallbackToSoftware */);
-    softwareVideoDecoderFactory = new SoftwareVideoDecoderFactory();
-  }
-
-  @Override
-  public VideoDecoder createDecoder(String codecType) {
-    VideoDecoder decoder = hardwareVideoDecoderFactory.createDecoder(codecType);
-    if (decoder != null) {
-      return decoder;
-    }
-    return softwareVideoDecoderFactory.createDecoder(codecType);
-  }
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/DefaultVideoEncoderFactory.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-import java.util.Arrays;
-import java.util.LinkedHashSet;
-import java.util.List;
-
-public class DefaultVideoEncoderFactory implements VideoEncoderFactory {
-  private final VideoEncoderFactory hardwareVideoEncoderFactory;
-  private final VideoEncoderFactory softwareVideoEncoderFactory;
-
-  public DefaultVideoEncoderFactory(
-      EglBase.Context eglContext, boolean enableIntelVp8Encoder, boolean enableH264HighProfile) {
-    hardwareVideoEncoderFactory = new HardwareVideoEncoderFactory(
-        eglContext, enableIntelVp8Encoder, enableH264HighProfile, false /* fallbackToSoftware */);
-    softwareVideoEncoderFactory = new SoftwareVideoEncoderFactory();
-  }
-
-  /* This is used for testing. */
-  DefaultVideoEncoderFactory(VideoEncoderFactory hardwareVideoEncoderFactory) {
-    this.hardwareVideoEncoderFactory = hardwareVideoEncoderFactory;
-    softwareVideoEncoderFactory = new SoftwareVideoEncoderFactory();
-  }
-
-  @Override
-  public VideoEncoder createEncoder(VideoCodecInfo info) {
-    final VideoEncoder videoEncoder = hardwareVideoEncoderFactory.createEncoder(info);
-    if (videoEncoder != null) {
-      return videoEncoder;
-    }
-    return softwareVideoEncoderFactory.createEncoder(info);
-  }
-
-  @Override
-  public VideoCodecInfo[] getSupportedCodecs() {
-    LinkedHashSet<VideoCodecInfo> supportedCodecInfos = new LinkedHashSet<VideoCodecInfo>();
-
-    supportedCodecInfos.addAll(Arrays.asList(softwareVideoEncoderFactory.getSupportedCodecs()));
-    supportedCodecInfos.addAll(Arrays.asList(hardwareVideoEncoderFactory.getSupportedCodecs()));
-
-    return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]);
-  }
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/DtmfSender.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-/** Java wrapper for a C++ DtmfSenderInterface. */
-public class DtmfSender {
-  final long nativeDtmfSender;
-
-  public DtmfSender(long nativeDtmfSender) {
-    this.nativeDtmfSender = nativeDtmfSender;
-  }
-
-  /**
-   * @return true if this DtmfSender is capable of sending DTMF. Otherwise false.
-   */
-  public boolean canInsertDtmf() {
-    return nativeCanInsertDtmf(nativeDtmfSender);
-  }
-
-  /**
-   * Queues a task that sends the provided DTMF tones.
-   * <p>
-   * If insertDtmf is called on the same object while an existing task for this
-   * object to generate DTMF is still running, the previous task is canceled.
-   *
-   * @param tones        This parameter is treated as a series of characters. The characters 0
-   *                     through 9, A through D, #, and * generate the associated DTMF tones. The
-   *                     characters a to d are equivalent to A to D. The character ',' indicates a
-   *                     delay of 2 seconds before processing the next character in the tones
-   *                     parameter. Unrecognized characters are ignored.
-   * @param duration     Indicates the duration in ms to use for each character passed in the tones
-   *                     parameter. The duration cannot be more than 6000 or less than 70.
-   * @param interToneGap Indicates the gap between tones in ms. Must be at least 50 ms but should be
-   *                     as short as possible.
-   * @return             true on success and false on failure.
-   */
-  public boolean insertDtmf(String tones, int duration, int interToneGap) {
-    return nativeInsertDtmf(nativeDtmfSender, tones, duration, interToneGap);
-  }
-
-  /**
-   * @return The tones remaining to be played out
-   */
-  public String tones() {
-    return nativeTones(nativeDtmfSender);
-  }
-
-  /**
-   * @return The current tone duration value in ms. This value will be the value last set via the
-   *         insertDtmf() method, or the default value of 100 ms if insertDtmf() was never called.
-   */
-  public int duration() {
-    return nativeDuration(nativeDtmfSender);
-  }
-
-  /**
-   * @return The current value of the between-tone gap in ms. This value will be the value last set
-   *         via the insertDtmf() method, or the default value of 50 ms if insertDtmf() was never
-   *         called.
-   */
-  public int interToneGap() {
-    return nativeInterToneGap(nativeDtmfSender);
-  }
-
-  public void dispose() {
-    JniCommon.nativeReleaseRef(nativeDtmfSender);
-  }
-
-  private static native boolean nativeCanInsertDtmf(long nativeDtmfSender);
-  private static native boolean nativeInsertDtmf(
-      long nativeDtmfSender, String tones, int duration, int interToneGap);
-  private static native String nativeTones(long nativeDtmfSender);
-  private static native int nativeDuration(long nativeDtmfSender);
-  private static native int nativeInterToneGap(long nativeDtmfSender);
-};
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/EglBase.java
+++ /dev/null
@@ -1,173 +0,0 @@
-/*
- *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-import android.graphics.SurfaceTexture;
-import android.view.Surface;
-
-import javax.microedition.khronos.egl.EGL10;
-
-/**
- * Holds EGL state and utility methods for handling an egl 1.0 EGLContext, an EGLDisplay,
- * and an EGLSurface.
- */
-public abstract class EglBase {
-  // EGL wrapper for an actual EGLContext.
-  public interface Context { long getNativeEglContext(); }
-
-  // According to the documentation, EGL can be used from multiple threads at the same time if each
-  // thread has its own EGLContext, but in practice it deadlocks on some devices when doing this.
-  // Therefore, synchronize on this global lock before calling dangerous EGL functions that might
-  // deadlock. See https://bugs.chromium.org/p/webrtc/issues/detail?id=5702 for more info.
-  public static final Object lock = new Object();
-
-  // These constants are taken from EGL14.EGL_OPENGL_ES2_BIT and EGL14.EGL_CONTEXT_CLIENT_VERSION.
-  // https://android.googlesource.com/platform/frameworks/base/+/master/opengl/java/android/opengl/EGL14.java
-  // This is similar to how GlSurfaceView does:
-  // http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/opengl/GLSurfaceView.java#760
-  public static final int EGL_OPENGL_ES2_BIT = 4;
-  // Android-specific extension.
-  public static final int EGL_RECORDABLE_ANDROID = 0x3142;
-
-  // clang-format off
-  public static final int[] CONFIG_PLAIN = {
-    EGL10.EGL_RED_SIZE, 8,
-    EGL10.EGL_GREEN_SIZE, 8,
-    EGL10.EGL_BLUE_SIZE, 8,
-    EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
-    EGL10.EGL_NONE
-  };
-  public static final int[] CONFIG_RGBA = {
-    EGL10.EGL_RED_SIZE, 8,
-    EGL10.EGL_GREEN_SIZE, 8,
-    EGL10.EGL_BLUE_SIZE, 8,
-    EGL10.EGL_ALPHA_SIZE, 8,
-    EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
-    EGL10.EGL_NONE
-  };
-  public static final int[] CONFIG_PIXEL_BUFFER = {
-    EGL10.EGL_RED_SIZE, 8,
-    EGL10.EGL_GREEN_SIZE, 8,
-    EGL10.EGL_BLUE_SIZE, 8,
-    EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
-    EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
-    EGL10.EGL_NONE
-  };
-  public static final int[] CONFIG_PIXEL_RGBA_BUFFER = {
-    EGL10.EGL_RED_SIZE, 8,
-    EGL10.EGL_GREEN_SIZE, 8,
-    EGL10.EGL_BLUE_SIZE, 8,
-    EGL10.EGL_ALPHA_SIZE, 8,
-    EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
-    EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
-    EGL10.EGL_NONE
-  };
-  public static final int[] CONFIG_RECORDABLE = {
-    EGL10.EGL_RED_SIZE, 8,
-    EGL10.EGL_GREEN_SIZE, 8,
-    EGL10.EGL_BLUE_SIZE, 8,
-    EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
-    EGL_RECORDABLE_ANDROID, 1,
-    EGL10.EGL_NONE
-  };
-  // clang-format on
-
-  /**
-   * Create a new context with the specified config attributes, sharing data with |sharedContext|.
-   * If |sharedContext| is null, a root context is created. This function will try to create an EGL
-   * 1.4 context if possible, and an EGL 1.0 context otherwise.
-   */
-  public static EglBase create(Context sharedContext, int[] configAttributes) {
-    return (EglBase14.isEGL14Supported()
-               && (sharedContext == null || sharedContext instanceof EglBase14.Context))
-        ? new EglBase14((EglBase14.Context) sharedContext, configAttributes)
-        : new EglBase10((EglBase10.Context) sharedContext, configAttributes);
-  }
-
-  /**
-   * Helper function for creating a plain root context. This function will try to create an EGL 1.4
-   * context if possible, and an EGL 1.0 context otherwise.
-   */
-  public static EglBase create() {
-    return create(null /* shaderContext */, CONFIG_PLAIN);
-  }
-
-  /**
-   * Helper function for creating a plain context, sharing data with |sharedContext|. This function
-   * will try to create an EGL 1.4 context if possible, and an EGL 1.0 context otherwise.
-   */
-  public static EglBase create(Context sharedContext) {
-    return create(sharedContext, CONFIG_PLAIN);
-  }
-
-  /**
-   * Explicitly create a root EGl 1.0 context with the specified config attributes.
-   */
-  public static EglBase createEgl10(int[] configAttributes) {
-    return new EglBase10(null /* shaderContext */, configAttributes);
-  }
-
-  /**
-   * Explicitly create a root EGl 1.0 context with the specified config attributes
-   * and shared context.
-   */
-  public static EglBase createEgl10(
-      javax.microedition.khronos.egl.EGLContext sharedContext, int[] configAttributes) {
-    return new EglBase10(new EglBase10.Context(sharedContext), configAttributes);
-  }
-
-  /**
-   * Explicitly create a root EGl 1.4 context with the specified config attributes.
-   */
-  public static EglBase createEgl14(int[] configAttributes) {
-    return new EglBase14(null /* shaderContext */, configAttributes);
-  }
-
-  /**
-   * Explicitly create a root EGl 1.4 context with the specified config attributes
-   * and shared context.
-   */
-  public static EglBase createEgl14(
-      android.opengl.EGLContext sharedContext, int[] configAttributes) {
-    return new EglBase14(new EglBase14.Context(sharedContext), configAttributes);
-  }
-
-  public abstract void createSurface(Surface surface);
-
-  // Create EGLSurface from the Android SurfaceTexture.
-  public abstract void createSurface(SurfaceTexture surfaceTexture);
-
-  // Create dummy 1x1 pixel buffer surface so the context can be made current.
-  public abstract void createDummyPbufferSurface();
-
-  public abstract void createPbufferSurface(int width, int height);
-
-  public abstract Context getEglBaseContext();
-
-  public abstract boolean hasSurface();
-
-  public abstract int surfaceWidth();
-
-  public abstract int surfaceHeight();
-
-  public abstract void releaseSurface();
-
-  public abstract void release();
-
-  public abstract void makeCurrent();
-
-  // Detach the current EGL context, so that it can be made current on another thread.
-  public abstract void detachCurrent();
-
-  public abstract void swapBuffers();
-
-  public abstract void swapBuffers(long presentationTimeStampNs);
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/EglRenderer.java
+++ /dev/null
@@ -1,694 +0,0 @@
-/*
- *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-import android.graphics.Bitmap;
-import android.graphics.Matrix;
-import android.graphics.SurfaceTexture;
-import android.opengl.GLES20;
-import android.os.Handler;
-import android.os.HandlerThread;
-import android.os.Looper;
-import android.view.Surface;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.Locale;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.TimeUnit;
-
-/**
- * Implements org.webrtc.VideoRenderer.Callbacks by displaying the video stream on an EGL Surface.
- * This class is intended to be used as a helper class for rendering on SurfaceViews and
- * TextureViews.
- */
-public class EglRenderer implements VideoRenderer.Callbacks, VideoSink {
-  private static final String TAG = "EglRenderer";
-  private static final long LOG_INTERVAL_SEC = 4;
-
-  public interface FrameListener { void onFrame(Bitmap frame); }
-
-  private static class FrameListenerAndParams {
-    public final FrameListener listener;
-    public final float scale;
-    public final RendererCommon.GlDrawer drawer;
-    public final boolean applyFpsReduction;
-
-    public FrameListenerAndParams(FrameListener listener, float scale,
-        RendererCommon.GlDrawer drawer, boolean applyFpsReduction) {
-      this.listener = listener;
-      this.scale = scale;
-      this.drawer = drawer;
-      this.applyFpsReduction = applyFpsReduction;
-    }
-  }
-
-  private class EglSurfaceCreation implements Runnable {
-    private Object surface;
-
-    // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
-    @SuppressWarnings("NoSynchronizedMethodCheck")
-    public synchronized void setSurface(Object surface) {
-      this.surface = surface;
-    }
-
-    @Override
-    // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
-    @SuppressWarnings("NoSynchronizedMethodCheck")
-    public synchronized void run() {
-      if (surface != null && eglBase != null && !eglBase.hasSurface()) {
-        if (surface instanceof Surface) {
-          eglBase.createSurface((Surface) surface);
-        } else if (surface instanceof SurfaceTexture) {
-          eglBase.createSurface((SurfaceTexture) surface);
-        } else {
-          throw new IllegalStateException("Invalid surface: " + surface);
-        }
-        eglBase.makeCurrent();
-        // Necessary for YUV frames with odd width.
-        GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
-      }
-    }
-  }
-
-  protected final String name;
-
-  // |renderThreadHandler| is a handler for communicating with |renderThread|, and is synchronized
-  // on |handlerLock|.
-  private final Object handlerLock = new Object();
-  private Handler renderThreadHandler;
-
-  private final ArrayList<FrameListenerAndParams> frameListeners = new ArrayList<>();
-
-  // Variables for fps reduction.
-  private final Object fpsReductionLock = new Object();
-  // Time for when next frame should be rendered.
-  private long nextFrameTimeNs;
-  // Minimum duration between frames when fps reduction is active, or -1 if video is completely
-  // paused.
-  private long minRenderPeriodNs;
-
-  // EGL and GL resources for drawing YUV/OES textures. After initilization, these are only accessed
-  // from the render thread.
-  private EglBase eglBase;
-  private final VideoFrameDrawer frameDrawer = new VideoFrameDrawer();
-  private RendererCommon.GlDrawer drawer;
-  private final Matrix drawMatrix = new Matrix();
-
-  // Pending frame to render. Serves as a queue with size 1. Synchronized on |frameLock|.
-  private final Object frameLock = new Object();
-  private VideoFrame pendingFrame;
-
-  // These variables are synchronized on |layoutLock|.
-  private final Object layoutLock = new Object();
-  private float layoutAspectRatio;
-  // If true, mirrors the video stream horizontally.
-  private boolean mirror;
-
-  // These variables are synchronized on |statisticsLock|.
-  private final Object statisticsLock = new Object();
-  // Total number of video frames received in renderFrame() call.
-  private int framesReceived;
-  // Number of video frames dropped by renderFrame() because previous frame has not been rendered
-  // yet.
-  private int framesDropped;
-  // Number of rendered video frames.
-  private int framesRendered;
-  // Start time for counting these statistics, or 0 if we haven't started measuring yet.
-  private long statisticsStartTimeNs;
-  // Time in ns spent in renderFrameOnRenderThread() function.
-  private long renderTimeNs;
-  // Time in ns spent by the render thread in the swapBuffers() function.
-  private long renderSwapBufferTimeNs;
-
-  // Used for bitmap capturing.
-  private GlTextureFrameBuffer bitmapTextureFramebuffer;
-
-  private final Runnable logStatisticsRunnable = new Runnable() {
-    @Override
-    public void run() {
-      logStatistics();
-      synchronized (handlerLock) {
-        if (renderThreadHandler != null) {
-          renderThreadHandler.removeCallbacks(logStatisticsRunnable);
-          renderThreadHandler.postDelayed(
-              logStatisticsRunnable, TimeUnit.SECONDS.toMillis(LOG_INTERVAL_SEC));
-        }
-      }
-    }
-  };
-
-  private final EglSurfaceCreation eglSurfaceCreationRunnable = new EglSurfaceCreation();
-
-  /**
-   * Standard constructor. The name will be used for the render thread name and included when
-   * logging. In order to render something, you must first call init() and createEglSurface.
-   */
-  public EglRenderer(String name) {
-    this.name = name;
-  }
-
-  /**
-   * Initialize this class, sharing resources with |sharedContext|. The custom |drawer| will be used
-   * for drawing frames on the EGLSurface. This class is responsible for calling release() on
-   * |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
-   * init()/release() cycle.
-   */
-  public void init(final EglBase.Context sharedContext, final int[] configAttributes,
-      RendererCommon.GlDrawer drawer) {
-    synchronized (handlerLock) {
-      if (renderThreadHandler != null) {
-        throw new IllegalStateException(name + "Already initialized");
-      }
-      logD("Initializing EglRenderer");
-      this.drawer = drawer;
-
-      final HandlerThread renderThread = new HandlerThread(name + "EglRenderer");
-      renderThread.start();
-      renderThreadHandler = new Handler(renderThread.getLooper());
-      // Create EGL context on the newly created render thread. It should be possibly to create the
-      // context on this thread and make it current on the render thread, but this causes failure on
-      // some Marvel based JB devices. https://bugs.chromium.org/p/webrtc/issues/detail?id=6350.
-      ThreadUtils.invokeAtFrontUninterruptibly(renderThreadHandler, () -> {
-        // If sharedContext is null, then texture frames are disabled. This is typically for old
-        // devices that might not be fully spec compliant, so force EGL 1.0 since EGL 1.4 has
-        // caused trouble on some weird devices.
-        if (sharedContext == null) {
-          logD("EglBase10.create context");
-          eglBase = EglBase.createEgl10(configAttributes);
-        } else {
-          logD("EglBase.create shared context");
-          eglBase = EglBase.create(sharedContext, configAttributes);
-        }
-      });
-      renderThreadHandler.post(eglSurfaceCreationRunnable);
-      final long currentTimeNs = System.nanoTime();
-      resetStatistics(currentTimeNs);
-      renderThreadHandler.postDelayed(
-          logStatisticsRunnable, TimeUnit.SECONDS.toMillis(LOG_INTERVAL_SEC));
-    }
-  }
-
-  public void createEglSurface(Surface surface) {
-    createEglSurfaceInternal(surface);
-  }
-
-  public void createEglSurface(SurfaceTexture surfaceTexture) {
-    createEglSurfaceInternal(surfaceTexture);
-  }
-
-  private void createEglSurfaceInternal(Object surface) {
-    eglSurfaceCreationRunnable.setSurface(surface);
-    postToRenderThread(eglSurfaceCreationRunnable);
-  }
-
-  /**
-   * Block until any pending frame is returned and all GL resources released, even if an interrupt
-   * occurs. If an interrupt occurs during release(), the interrupt flag will be set. This function
-   * should be called before the Activity is destroyed and the EGLContext is still valid. If you
-   * don't call this function, the GL resources might leak.
-   */
-  public void release() {
-    logD("Releasing.");
-    final CountDownLatch eglCleanupBarrier = new CountDownLatch(1);
-    synchronized (handlerLock) {
-      if (renderThreadHandler == null) {
-        logD("Already released");
-        return;
-      }
-      renderThreadHandler.removeCallbacks(logStatisticsRunnable);
-      // Release EGL and GL resources on render thread.
-      renderThreadHandler.postAtFrontOfQueue(() -> {
-        if (drawer != null) {
-          drawer.release();
-          drawer = null;
-        }
-        frameDrawer.release();
-        if (bitmapTextureFramebuffer != null) {
-          bitmapTextureFramebuffer.release();
-          bitmapTextureFramebuffer = null;
-        }
-        if (eglBase != null) {
-          logD("eglBase detach and release.");
-          eglBase.detachCurrent();
-          eglBase.release();
-          eglBase = null;
-        }
-        frameListeners.clear();
-        eglCleanupBarrier.countDown();
-      });
-      final Looper renderLooper = renderThreadHandler.getLooper();
-      // TODO(magjed): Replace this post() with renderLooper.quitSafely() when API support >= 18.
-      renderThreadHandler.post(() -> {
-        logD("Quitting render thread.");
-        renderLooper.quit();
-      });
-      // Don't accept any more frames or messages to the render thread.
-      renderThreadHandler = null;
-    }
-    // Make sure the EGL/GL cleanup posted above is executed.
-    ThreadUtils.awaitUninterruptibly(eglCleanupBarrier);
-    synchronized (frameLock) {
-      if (pendingFrame != null) {
-        pendingFrame.release();
-        pendingFrame = null;
-      }
-    }
-    logD("Releasing done.");
-  }
-
-  /**
-   * Reset the statistics logged in logStatistics().
-   */
-  private void resetStatistics(long currentTimeNs) {
-    synchronized (statisticsLock) {
-      statisticsStartTimeNs = currentTimeNs;
-      framesReceived = 0;
-      framesDropped = 0;
-      framesRendered = 0;
-      renderTimeNs = 0;
-      renderSwapBufferTimeNs = 0;
-    }
-  }
-
-  public void printStackTrace() {
-    synchronized (handlerLock) {
-      final Thread renderThread =
-          (renderThreadHandler == null) ? null : renderThreadHandler.getLooper().getThread();
-      if (renderThread != null) {
-        final StackTraceElement[] renderStackTrace = renderThread.getStackTrace();
-        if (renderStackTrace.length > 0) {
-          logD("EglRenderer stack trace:");
-          for (StackTraceElement traceElem : renderStackTrace) {
-            logD(traceElem.toString());
-          }
-        }
-      }
-    }
-  }
-
-  /**
-   * Set if the video stream should be mirrored or not.
-   */
-  public void setMirror(final boolean mirror) {
-    logD("setMirror: " + mirror);
-    synchronized (layoutLock) {
-      this.mirror = mirror;
-    }
-  }
-
-  /**
-   * Set layout aspect ratio. This is used to crop frames when rendering to avoid stretched video.
-   * Set this to 0 to disable cropping.
-   */
-  public void setLayoutAspectRatio(float layoutAspectRatio) {
-    logD("setLayoutAspectRatio: " + layoutAspectRatio);
-    synchronized (layoutLock) {
-      this.layoutAspectRatio = layoutAspectRatio;
-    }
-  }
-
-  /**
-   * Limit render framerate.
-   *
-   * @param fps Limit render framerate to this value, or use Float.POSITIVE_INFINITY to disable fps
-   *            reduction.
-   */
-  public void setFpsReduction(float fps) {
-    logD("setFpsReduction: " + fps);
-    synchronized (fpsReductionLock) {
-      final long previousRenderPeriodNs = minRenderPeriodNs;
-      if (fps <= 0) {
-        minRenderPeriodNs = Long.MAX_VALUE;
-      } else {
-        minRenderPeriodNs = (long) (TimeUnit.SECONDS.toNanos(1) / fps);
-      }
-      if (minRenderPeriodNs != previousRenderPeriodNs) {
-        // Fps reduction changed - reset frame time.
-        nextFrameTimeNs = System.nanoTime();
-      }
-    }
-  }
-
-  public void disableFpsReduction() {
-    setFpsReduction(Float.POSITIVE_INFINITY /* fps */);
-  }
-
-  public void pauseVideo() {
-    setFpsReduction(0 /* fps */);
-  }
-
-  /**
-   * Register a callback to be invoked when a new video frame has been received. This version uses
-   * the drawer of the EglRenderer that was passed in init.
-   *
-   * @param listener The callback to be invoked. The callback will be invoked on the render thread.
-   *                 It should be lightweight and must not call removeFrameListener.
-   * @param scale    The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
-   *                 required.
-   */
-  public void addFrameListener(final FrameListener listener, final float scale) {
-    addFrameListener(listener, scale, null, false /* applyFpsReduction */);
-  }
-
-  /**
-   * Register a callback to be invoked when a new video frame has been received.
-   *
-   * @param listener The callback to be invoked. The callback will be invoked on the render thread.
-   *                 It should be lightweight and must not call removeFrameListener.
-   * @param scale    The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
-   *                 required.
-   * @param drawer   Custom drawer to use for this frame listener or null to use the default one.
-   */
-  public void addFrameListener(
-      final FrameListener listener, final float scale, final RendererCommon.GlDrawer drawerParam) {
-    addFrameListener(listener, scale, drawerParam, false /* applyFpsReduction */);
-  }
-
-  /**
-   * Register a callback to be invoked when a new video frame has been received.
-   *
-   * @param listener The callback to be invoked. The callback will be invoked on the render thread.
-   *                 It should be lightweight and must not call removeFrameListener.
-   * @param scale    The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
-   *                 required.
-   * @param drawer   Custom drawer to use for this frame listener or null to use the default one.
-   * @param applyFpsReduction This callback will not be called for frames that have been dropped by
-   *                          FPS reduction.
-   */
-  public void addFrameListener(final FrameListener listener, final float scale,
-      final RendererCommon.GlDrawer drawerParam, final boolean applyFpsReduction) {
-    postToRenderThread(() -> {
-      final RendererCommon.GlDrawer listenerDrawer = drawerParam == null ? drawer : drawerParam;
-      frameListeners.add(
-          new FrameListenerAndParams(listener, scale, listenerDrawer, applyFpsReduction));
-    });
-  }
-
-  /**
-   * Remove any pending callback that was added with addFrameListener. If the callback is not in
-   * the queue, nothing happens. It is ensured that callback won't be called after this method
-   * returns.
-   *
-   * @param runnable The callback to remove.
-   */
-  public void removeFrameListener(final FrameListener listener) {
-    final CountDownLatch latch = new CountDownLatch(1);
-    synchronized (handlerLock) {
-      if (renderThreadHandler == null) {
-        return;
-      }
-      if (Thread.currentThread() == renderThreadHandler.getLooper().getThread()) {
-        throw new RuntimeException("removeFrameListener must not be called on the render thread.");
-      }
-      postToRenderThread(() -> {
-        latch.countDown();
-        final Iterator<FrameListenerAndParams> iter = frameListeners.iterator();
-        while (iter.hasNext()) {
-          if (iter.next().listener == listener) {
-            iter.remove();
-          }
-        }
-      });
-    }
-    ThreadUtils.awaitUninterruptibly(latch);
-  }
-
-  // VideoRenderer.Callbacks interface.
-  @Override
-  public void renderFrame(VideoRenderer.I420Frame frame) {
-    VideoFrame videoFrame = frame.toVideoFrame();
-    onFrame(videoFrame);
-    videoFrame.release();
-  }
-
-  // VideoSink interface.
-  @Override
-  public void onFrame(VideoFrame frame) {
-    synchronized (statisticsLock) {
-      ++framesReceived;
-    }
-    final boolean dropOldFrame;
-    synchronized (handlerLock) {
-      if (renderThreadHandler == null) {
-        logD("Dropping frame - Not initialized or already released.");
-        return;
-      }
-      synchronized (frameLock) {
-        dropOldFrame = (pendingFrame != null);
-        if (dropOldFrame) {
-          pendingFrame.release();
-        }
-        pendingFrame = frame;
-        pendingFrame.retain();
-        renderThreadHandler.post(this ::renderFrameOnRenderThread);
-      }
-    }
-    if (dropOldFrame) {
-      synchronized (statisticsLock) {
-        ++framesDropped;
-      }
-    }
-  }
-
-  /**
-   * Release EGL surface. This function will block until the EGL surface is released.
-   */
-  public void releaseEglSurface(final Runnable completionCallback) {
-    // Ensure that the render thread is no longer touching the Surface before returning from this
-    // function.
-    eglSurfaceCreationRunnable.setSurface(null /* surface */);
-    synchronized (handlerLock) {
-      if (renderThreadHandler != null) {
-        renderThreadHandler.removeCallbacks(eglSurfaceCreationRunnable);
-        renderThreadHandler.postAtFrontOfQueue(() -> {
-          if (eglBase != null) {
-            eglBase.detachCurrent();
-            eglBase.releaseSurface();
-          }
-          completionCallback.run();
-        });
-        return;
-      }
-    }
-    completionCallback.run();
-  }
-
-  /**
-   * Private helper function to post tasks safely.
-   */
-  private void postToRenderThread(Runnable runnable) {
-    synchronized (handlerLock) {
-      if (renderThreadHandler != null) {
-        renderThreadHandler.post(runnable);
-      }
-    }
-  }
-
-  private void clearSurfaceOnRenderThread(float r, float g, float b, float a) {
-    if (eglBase != null && eglBase.hasSurface()) {
-      logD("clearSurface");
-      GLES20.glClearColor(r, g, b, a);
-      GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
-      eglBase.swapBuffers();
-    }
-  }
-
-  /**
-   * Post a task to clear the surface to a transparent uniform color.
-   */
-  public void clearImage() {
-    clearImage(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
-  }
-
-  /**
-   * Post a task to clear the surface to a specific color.
-   */
-  public void clearImage(final float r, final float g, final float b, final float a) {
-    synchronized (handlerLock) {
-      if (renderThreadHandler == null) {
-        return;
-      }
-      renderThreadHandler.postAtFrontOfQueue(() -> clearSurfaceOnRenderThread(r, g, b, a));
-    }
-  }
-
-  /**
-   * Renders and releases |pendingFrame|.
-   */
-  private void renderFrameOnRenderThread() {
-    // Fetch and render |pendingFrame|.
-    final VideoFrame frame;
-    synchronized (frameLock) {
-      if (pendingFrame == null) {
-        return;
-      }
-      frame = pendingFrame;
-      pendingFrame = null;
-    }
-    if (eglBase == null || !eglBase.hasSurface()) {
-      logD("Dropping frame - No surface");
-      frame.release();
-      return;
-    }
-    // Check if fps reduction is active.
-    final boolean shouldRenderFrame;
-    synchronized (fpsReductionLock) {
-      if (minRenderPeriodNs == Long.MAX_VALUE) {
-        // Rendering is paused.
-        shouldRenderFrame = false;
-      } else if (minRenderPeriodNs <= 0) {
-        // FPS reduction is disabled.
-        shouldRenderFrame = true;
-      } else {
-        final long currentTimeNs = System.nanoTime();
-        if (currentTimeNs < nextFrameTimeNs) {
-          logD("Skipping frame rendering - fps reduction is active.");
-          shouldRenderFrame = false;
-        } else {
-          nextFrameTimeNs += minRenderPeriodNs;
-          // The time for the next frame should always be in the future.
-          nextFrameTimeNs = Math.max(nextFrameTimeNs, currentTimeNs);
-          shouldRenderFrame = true;
-        }
-      }
-    }
-
-    final long startTimeNs = System.nanoTime();
-
-    final float frameAspectRatio = frame.getRotatedWidth() / (float) frame.getRotatedHeight();
-    final float drawnAspectRatio;
-    synchronized (layoutLock) {
-      drawnAspectRatio = layoutAspectRatio != 0f ? layoutAspectRatio : frameAspectRatio;
-    }
-
-    final float scaleX;
-    final float scaleY;
-
-    if (frameAspectRatio > drawnAspectRatio) {
-      scaleX = drawnAspectRatio / frameAspectRatio;
-      scaleY = 1f;
-    } else {
-      scaleX = 1f;
-      scaleY = frameAspectRatio / drawnAspectRatio;
-    }
-
-    drawMatrix.reset();
-    drawMatrix.preTranslate(0.5f, 0.5f);
-    if (mirror)
-      drawMatrix.preScale(-1f, 1f);
-    drawMatrix.preScale(scaleX, scaleY);
-    drawMatrix.preTranslate(-0.5f, -0.5f);
-
-    if (shouldRenderFrame) {
-      GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
-      GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
-      frameDrawer.drawFrame(frame, drawer, drawMatrix, 0 /* viewportX */, 0 /* viewportY */,
-          eglBase.surfaceWidth(), eglBase.surfaceHeight());
-
-      final long swapBuffersStartTimeNs = System.nanoTime();
-      eglBase.swapBuffers();
-
-      final long currentTimeNs = System.nanoTime();
-      synchronized (statisticsLock) {
-        ++framesRendered;
-        renderTimeNs += (currentTimeNs - startTimeNs);
-        renderSwapBufferTimeNs += (currentTimeNs - swapBuffersStartTimeNs);
-      }
-    }
-
-    notifyCallbacks(frame, shouldRenderFrame);
-    frame.release();
-  }
-
-  private void notifyCallbacks(VideoFrame frame, boolean wasRendered) {
-    if (frameListeners.isEmpty())
-      return;
-
-    drawMatrix.reset();
-    drawMatrix.preTranslate(0.5f, 0.5f);
-    if (mirror)
-      drawMatrix.preScale(-1f, 1f);
-    drawMatrix.preScale(1f, -1f); // We want the output to be upside down for Bitmap.
-    drawMatrix.preTranslate(-0.5f, -0.5f);
-
-    Iterator<FrameListenerAndParams> it = frameListeners.iterator();
-    while (it.hasNext()) {
-      FrameListenerAndParams listenerAndParams = it.next();
-      if (!wasRendered && listenerAndParams.applyFpsReduction) {
-        continue;
-      }
-      it.remove();
-
-      final int scaledWidth = (int) (listenerAndParams.scale * frame.getRotatedWidth());
-      final int scaledHeight = (int) (listenerAndParams.scale * frame.getRotatedHeight());
-
-      if (scaledWidth == 0 || scaledHeight == 0) {
-        listenerAndParams.listener.onFrame(null);
-        continue;
-      }
-
-      if (bitmapTextureFramebuffer == null) {
-        bitmapTextureFramebuffer = new GlTextureFrameBuffer(GLES20.GL_RGBA);
-      }
-      bitmapTextureFramebuffer.setSize(scaledWidth, scaledHeight);
-
-      GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, bitmapTextureFramebuffer.getFrameBufferId());
-      GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
-          GLES20.GL_TEXTURE_2D, bitmapTextureFramebuffer.getTextureId(), 0);
-
-      GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
-      GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
-      frameDrawer.drawFrame(frame, listenerAndParams.drawer, drawMatrix, 0 /* viewportX */,
-          0 /* viewportY */, scaledWidth, scaledHeight);
-
-      final ByteBuffer bitmapBuffer = ByteBuffer.allocateDirect(scaledWidth * scaledHeight * 4);
-      GLES20.glViewport(0, 0, scaledWidth, scaledHeight);
-      GLES20.glReadPixels(
-          0, 0, scaledWidth, scaledHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, bitmapBuffer);
-
-      GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
-      GlUtil.checkNoGLES2Error("EglRenderer.notifyCallbacks");
-
-      final Bitmap bitmap = Bitmap.createBitmap(scaledWidth, scaledHeight, Bitmap.Config.ARGB_8888);
-      bitmap.copyPixelsFromBuffer(bitmapBuffer);
-      listenerAndParams.listener.onFrame(bitmap);
-    }
-  }
-
-  private String averageTimeAsString(long sumTimeNs, int count) {
-    return (count <= 0) ? "NA" : TimeUnit.NANOSECONDS.toMicros(sumTimeNs / count) + " μs";
-  }
-
-  private void logStatistics() {
-    final long currentTimeNs = System.nanoTime();
-    synchronized (statisticsLock) {
-      final long elapsedTimeNs = currentTimeNs - statisticsStartTimeNs;
-      if (elapsedTimeNs <= 0) {
-        return;
-      }
-      final float renderFps = framesRendered * TimeUnit.SECONDS.toNanos(1) / (float) elapsedTimeNs;
-      logD("Duration: " + TimeUnit.NANOSECONDS.toMillis(elapsedTimeNs) + " ms."
-          + " Frames received: " + framesReceived + "."
-          + " Dropped: " + framesDropped + "."
-          + " Rendered: " + framesRendered + "."
-          + " Render fps: " + String.format(Locale.US, "%.1f", renderFps) + "."
-          + " Average render time: " + averageTimeAsString(renderTimeNs, framesRendered) + "."
-          + " Average swapBuffer time: "
-          + averageTimeAsString(renderSwapBufferTimeNs, framesRendered) + ".");
-      resetStatistics(currentTimeNs);
-    }
-  }
-
-  private void logD(String string) {
-    Logging.d(TAG, name + string);
-  }
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/EncodedImage.java
+++ /dev/null
@@ -1,139 +0,0 @@
-/*
- *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-import java.nio.ByteBuffer;
-import java.util.concurrent.TimeUnit;
-
-/**
- * An encoded frame from a video stream. Used as an input for decoders and as an output for
- * encoders.
- */
-public class EncodedImage {
-  // Must be kept in sync with common_types.h FrameType.
-  public enum FrameType {
-    EmptyFrame(0),
-    VideoFrameKey(3),
-    VideoFrameDelta(4);
-
-    private final int nativeIndex;
-
-    private FrameType(int nativeIndex) {
-      this.nativeIndex = nativeIndex;
-    }
-
-    public int getNative() {
-      return nativeIndex;
-    }
-
-    @CalledByNative("FrameType")
-    static FrameType fromNativeIndex(int nativeIndex) {
-      for (FrameType type : FrameType.values()) {
-        if (type.getNative() == nativeIndex) {
-          return type;
-        }
-      }
-      throw new IllegalArgumentException("Unknown native frame type: " + nativeIndex);
-    }
-  }
-
-  public final ByteBuffer buffer;
-  public final int encodedWidth;
-  public final int encodedHeight;
-  public final long captureTimeMs; // Deprecated
-  public final long captureTimeNs;
-  public final FrameType frameType;
-  public final int rotation;
-  public final boolean completeFrame;
-  public final Integer qp;
-
-  @CalledByNative
-  private EncodedImage(ByteBuffer buffer, int encodedWidth, int encodedHeight, long captureTimeNs,
-      FrameType frameType, int rotation, boolean completeFrame, Integer qp) {
-    this.buffer = buffer;
-    this.encodedWidth = encodedWidth;
-    this.encodedHeight = encodedHeight;
-    this.captureTimeMs = TimeUnit.NANOSECONDS.toMillis(captureTimeNs);
-    this.captureTimeNs = captureTimeNs;
-    this.frameType = frameType;
-    this.rotation = rotation;
-    this.completeFrame = completeFrame;
-    this.qp = qp;
-  }
-
-  public static Builder builder() {
-    return new Builder();
-  }
-
-  public static class Builder {
-    private ByteBuffer buffer;
-    private int encodedWidth;
-    private int encodedHeight;
-    private long captureTimeNs;
-    private EncodedImage.FrameType frameType;
-    private int rotation;
-    private boolean completeFrame;
-    private Integer qp;
-
-    private Builder() {}
-
-    public Builder setBuffer(ByteBuffer buffer) {
-      this.buffer = buffer;
-      return this;
-    }
-
-    public Builder setEncodedWidth(int encodedWidth) {
-      this.encodedWidth = encodedWidth;
-      return this;
-    }
-
-    public Builder setEncodedHeight(int encodedHeight) {
-      this.encodedHeight = encodedHeight;
-      return this;
-    }
-
-    @Deprecated
-    public Builder setCaptureTimeMs(long captureTimeMs) {
-      this.captureTimeNs = TimeUnit.MILLISECONDS.toNanos(captureTimeMs);
-      return this;
-    }
-
-    public Builder setCaptureTimeNs(long captureTimeNs) {
-      this.captureTimeNs = captureTimeNs;
-      return this;
-    }
-
-    public Builder setFrameType(EncodedImage.FrameType frameType) {
-      this.frameType = frameType;
-      return this;
-    }
-
-    public Builder setRotation(int rotation) {
-      this.rotation = rotation;
-      return this;
-    }
-
-    public Builder setCompleteFrame(boolean completeFrame) {
-      this.completeFrame = completeFrame;
-      return this;
-    }
-
-    public Builder setQp(Integer qp) {
-      this.qp = qp;
-      return this;
-    }
-
-    public EncodedImage createEncodedImage() {
-      return new EncodedImage(buffer, encodedWidth, encodedHeight, captureTimeNs, frameType,
-          rotation, completeFrame, qp);
-    }
-  }
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/FileVideoCapturer.java
+++ /dev/null
@@ -1,193 +0,0 @@
-/*
- *  Copyright 2016 The WebRTC Project Authors. All rights reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-import android.content.Context;
-import android.os.SystemClock;
-import java.io.IOException;
-import java.io.RandomAccessFile;
-import java.nio.ByteBuffer;
-import java.nio.charset.Charset;
-import java.util.Timer;
-import java.util.TimerTask;
-import java.util.concurrent.TimeUnit;
-
-public class FileVideoCapturer implements VideoCapturer {
-  private interface VideoReader {
-    VideoFrame getNextFrame();
-    void close();
-  }
-
-  /**
-   * Read video data from file for the .y4m container.
-   */
-  private static class VideoReaderY4M implements VideoReader {
-    private static final String TAG = "VideoReaderY4M";
-    private static final String Y4M_FRAME_DELIMETER = "FRAME";
-
-    private final int frameWidth;
-    private final int frameHeight;
-    // First char after header
-    private final long videoStart;
-    private final RandomAccessFile mediaFileStream;
-
-    public VideoReaderY4M(String file) throws IOException {
-      mediaFileStream = new RandomAccessFile(file, "r");
-      StringBuilder builder = new StringBuilder();
-      for (;;) {
-        int c = mediaFileStream.read();
-        if (c == -1) {
-          // End of file reached.
-          throw new RuntimeException("Found end of file before end of header for file: " + file);
-        }
-        if (c == '\n') {
-          // End of header found.
-          break;
-        }
-        builder.append((char) c);
-      }
-      videoStart = mediaFileStream.getFilePointer();
-      String header = builder.toString();
-      String[] headerTokens = header.split("[ ]");
-      int w = 0;
-      int h = 0;
-      String colorSpace = "";
-      for (String tok : headerTokens) {
-        char c = tok.charAt(0);
-        switch (c) {
-          case 'W':
-            w = Integer.parseInt(tok.substring(1));
-            break;
-          case 'H':
-            h = Integer.parseInt(tok.substring(1));
-            break;
-          case 'C':
-            colorSpace = tok.substring(1);
-            break;
-        }
-      }
-      Logging.d(TAG, "Color space: " + colorSpace);
-      if (!colorSpace.equals("420") && !colorSpace.equals("420mpeg2")) {
-        throw new IllegalArgumentException(
-            "Does not support any other color space than I420 or I420mpeg2");
-      }
-      if ((w % 2) == 1 || (h % 2) == 1) {
-        throw new IllegalArgumentException("Does not support odd width or height");
-      }
-      frameWidth = w;
-      frameHeight = h;
-      Logging.d(TAG, "frame dim: (" + w + ", " + h + ")");
-    }
-
-    @Override
-    public VideoFrame getNextFrame() {
-      final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
-      final JavaI420Buffer buffer = JavaI420Buffer.allocate(frameWidth, frameHeight);
-      final ByteBuffer dataY = buffer.getDataY();
-      final ByteBuffer dataU = buffer.getDataU();
-      final ByteBuffer dataV = buffer.getDataV();
-      final int chromaHeight = (frameHeight + 1) / 2;
-      final int sizeY = frameHeight * buffer.getStrideY();
-      final int sizeU = chromaHeight * buffer.getStrideU();
-      final int sizeV = chromaHeight * buffer.getStrideV();
-
-      try {
-        byte[] frameDelim = new byte[Y4M_FRAME_DELIMETER.length() + 1];
-        if (mediaFileStream.read(frameDelim) < frameDelim.length) {
-          // We reach end of file, loop
-          mediaFileStream.seek(videoStart);
-          if (mediaFileStream.read(frameDelim) < frameDelim.length) {
-            throw new RuntimeException("Error looping video");
-          }
-        }
-        String frameDelimStr = new String(frameDelim, Charset.forName("US-ASCII"));
-        if (!frameDelimStr.equals(Y4M_FRAME_DELIMETER + "\n")) {
-          throw new RuntimeException(
-              "Frames should be delimited by FRAME plus newline, found delimter was: '"
-              + frameDelimStr + "'");
-        }
-
-        mediaFileStream.readFully(dataY.array(), dataY.arrayOffset(), sizeY);
-        mediaFileStream.readFully(dataU.array(), dataU.arrayOffset(), sizeU);
-        mediaFileStream.readFully(dataV.array(), dataV.arrayOffset(), sizeV);
-      } catch (IOException e) {
-        throw new RuntimeException(e);
-      }
-
-      return new VideoFrame(buffer, 0 /* rotation */, captureTimeNs);
-    }
-
-    @Override
-    public void close() {
-      try {
-        mediaFileStream.close();
-      } catch (IOException e) {
-        Logging.e(TAG, "Problem closing file", e);
-      }
-    }
-  }
-
-  private final static String TAG = "FileVideoCapturer";
-  private final VideoReader videoReader;
-  private CapturerObserver capturerObserver;
-  private final Timer timer = new Timer();
-
-  private final TimerTask tickTask = new TimerTask() {
-    @Override
-    public void run() {
-      tick();
-    }
-  };
-
-  public FileVideoCapturer(String inputFile) throws IOException {
-    try {
-      videoReader = new VideoReaderY4M(inputFile);
-    } catch (IOException e) {
-      Logging.d(TAG, "Could not open video file: " + inputFile);
-      throw e;
-    }
-  }
-
-  public void tick() {
-    capturerObserver.onFrameCaptured(videoReader.getNextFrame());
-  }
-
-  @Override
-  public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
-      CapturerObserver capturerObserver) {
-    this.capturerObserver = capturerObserver;
-  }
-
-  @Override
-  public void startCapture(int width, int height, int framerate) {
-    timer.schedule(tickTask, 0, 1000 / framerate);
-  }
-
-  @Override
-  public void stopCapture() throws InterruptedException {
-    timer.cancel();
-  }
-
-  @Override
-  public void changeCaptureFormat(int width, int height, int framerate) {
-    // Empty on purpose
-  }
-
-  @Override
-  public void dispose() {
-    videoReader.close();
-  }
-
-  @Override
-  public boolean isScreencast() {
-    return false;
-  }
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/GlRectDrawer.java
+++ /dev/null
@@ -1,210 +0,0 @@
-/*
- *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-import android.opengl.GLES11Ext;
-import android.opengl.GLES20;
-import java.nio.FloatBuffer;
-import java.util.IdentityHashMap;
-import java.util.Map;
-
-/**
- * Helper class to draw an opaque quad on the target viewport location. Rotation, mirror, and
- * cropping is specified using a 4x4 texture coordinate transform matrix. The frame input can either
- * be an OES texture or YUV textures in I420 format. The GL state must be preserved between draw
- * calls, this is intentional to maximize performance. The function release() must be called
- * manually to free the resources held by this object.
- */
-public class GlRectDrawer implements RendererCommon.GlDrawer {
-  // clang-format off
-  // Simple vertex shader, used for both YUV and OES.
-  private static final String VERTEX_SHADER_STRING =
-        "varying vec2 interp_tc;\n"
-      + "attribute vec4 in_pos;\n"
-      + "attribute vec4 in_tc;\n"
-      + "\n"
-      + "uniform mat4 texMatrix;\n"
-      + "\n"
-      + "void main() {\n"
-      + "    gl_Position = in_pos;\n"
-      + "    interp_tc = (texMatrix * in_tc).xy;\n"
-      + "}\n";
-
-  private static final String YUV_FRAGMENT_SHADER_STRING =
-        "precision mediump float;\n"
-      + "varying vec2 interp_tc;\n"
-      + "\n"
-      + "uniform sampler2D y_tex;\n"
-      + "uniform sampler2D u_tex;\n"
-      + "uniform sampler2D v_tex;\n"
-      + "\n"
-      + "void main() {\n"
-      // CSC according to http://www.fourcc.org/fccyvrgb.php
-      + "  float y = texture2D(y_tex, interp_tc).r;\n"
-      + "  float u = texture2D(u_tex, interp_tc).r - 0.5;\n"
-      + "  float v = texture2D(v_tex, interp_tc).r - 0.5;\n"
-      + "  gl_FragColor = vec4(y + 1.403 * v, "
-      + "                      y - 0.344 * u - 0.714 * v, "
-      + "                      y + 1.77 * u, 1);\n"
-      + "}\n";
-
-  private static final String RGB_FRAGMENT_SHADER_STRING =
-        "precision mediump float;\n"
-      + "varying vec2 interp_tc;\n"
-      + "\n"
-      + "uniform sampler2D rgb_tex;\n"
-      + "\n"
-      + "void main() {\n"
-      + "  gl_FragColor = texture2D(rgb_tex, interp_tc);\n"
-      + "}\n";
-
-  private static final String OES_FRAGMENT_SHADER_STRING =
-        "#extension GL_OES_EGL_image_external : require\n"
-      + "precision mediump float;\n"
-      + "varying vec2 interp_tc;\n"
-      + "\n"
-      + "uniform samplerExternalOES oes_tex;\n"
-      + "\n"
-      + "void main() {\n"
-      + "  gl_FragColor = texture2D(oes_tex, interp_tc);\n"
-      + "}\n";
-  // clang-format on
-
-  // Vertex coordinates in Normalized Device Coordinates, i.e. (-1, -1) is bottom-left and (1, 1) is
-  // top-right.
-  private static final FloatBuffer FULL_RECTANGLE_BUF = GlUtil.createFloatBuffer(new float[] {
-      -1.0f, -1.0f, // Bottom left.
-      1.0f, -1.0f, // Bottom right.
-      -1.0f, 1.0f, // Top left.
-      1.0f, 1.0f, // Top right.
-  });
-
-  // Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
-  private static final FloatBuffer FULL_RECTANGLE_TEX_BUF = GlUtil.createFloatBuffer(new float[] {
-      0.0f, 0.0f, // Bottom left.
-      1.0f, 0.0f, // Bottom right.
-      0.0f, 1.0f, // Top left.
-      1.0f, 1.0f // Top right.
-  });
-
-  private static class Shader {
-    public final GlShader glShader;
-    public final int texMatrixLocation;
-
-    public Shader(String fragmentShader) {
-      this.glShader = new GlShader(VERTEX_SHADER_STRING, fragmentShader);
-      this.texMatrixLocation = glShader.getUniformLocation("texMatrix");
-    }
-  }
-
-  // The keys are one of the fragments shaders above.
-  private final Map<String, Shader> shaders = new IdentityHashMap<String, Shader>();
-
-  /**
-   * Draw an OES texture frame with specified texture transformation matrix. Required resources are
-   * allocated at the first call to this function.
-   */
-  @Override
-  public void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight,
-      int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
-    prepareShader(OES_FRAGMENT_SHADER_STRING, texMatrix);
-    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
-    // updateTexImage() may be called from another thread in another EGL context, so we need to
-    // bind/unbind the texture in each draw call so that GLES understads it's a new texture.
-    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTextureId);
-    drawRectangle(viewportX, viewportY, viewportWidth, viewportHeight);
-    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
-  }
-
-  /**
-   * Draw a RGB(A) texture frame with specified texture transformation matrix. Required resources
-   * are allocated at the first call to this function.
-   */
-  @Override
-  public void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight,
-      int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
-    prepareShader(RGB_FRAGMENT_SHADER_STRING, texMatrix);
-    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
-    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
-    drawRectangle(viewportX, viewportY, viewportWidth, viewportHeight);
-    // Unbind the texture as a precaution.
-    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
-  }
-
-  /**
-   * Draw a YUV frame with specified texture transformation matrix. Required resources are
-   * allocated at the first call to this function.
-   */
-  @Override
-  public void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight,
-      int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
-    prepareShader(YUV_FRAGMENT_SHADER_STRING, texMatrix);
-    // Bind the textures.
-    for (int i = 0; i < 3; ++i) {
-      GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
-      GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
-    }
-    drawRectangle(viewportX, viewportY, viewportWidth, viewportHeight);
-    // Unbind the textures as a precaution..
-    for (int i = 0; i < 3; ++i) {
-      GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
-      GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
-    }
-  }
-
-  private void drawRectangle(int x, int y, int width, int height) {
-    // Draw quad.
-    GLES20.glViewport(x, y, width, height);
-    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
-  }
-
-  private void prepareShader(String fragmentShader, float[] texMatrix) {
-    final Shader shader;
-    if (shaders.containsKey(fragmentShader)) {
-      shader = shaders.get(fragmentShader);
-    } else {
-      // Lazy allocation.
-      shader = new Shader(fragmentShader);
-      shaders.put(fragmentShader, shader);
-      shader.glShader.useProgram();
-      // Initialize fragment shader uniform values.
-      if (YUV_FRAGMENT_SHADER_STRING.equals(fragmentShader)) {
-        GLES20.glUniform1i(shader.glShader.getUniformLocation("y_tex"), 0);
-        GLES20.glUniform1i(shader.glShader.getUniformLocation("u_tex"), 1);
-        GLES20.glUniform1i(shader.glShader.getUniformLocation("v_tex"), 2);
-      } else if (RGB_FRAGMENT_SHADER_STRING.equals(fragmentShader)) {
-        GLES20.glUniform1i(shader.glShader.getUniformLocation("rgb_tex"), 0);
-      } else if (OES_FRAGMENT_SHADER_STRING.equals(fragmentShader)) {
-        GLES20.glUniform1i(shader.glShader.getUniformLocation("oes_tex"), 0);
-      } else {
-        throw new IllegalStateException("Unknown fragment shader: " + fragmentShader);
-      }
-      GlUtil.checkNoGLES2Error("Initialize fragment shader uniform values.");
-      // Initialize vertex shader attributes.
-      shader.glShader.setVertexAttribArray("in_pos", 2, FULL_RECTANGLE_BUF);
-      shader.glShader.setVertexAttribArray("in_tc", 2, FULL_RECTANGLE_TEX_BUF);
-    }
-    shader.glShader.useProgram();
-    // Copy the texture transformation matrix over.
-    GLES20.glUniformMatrix4fv(shader.texMatrixLocation, 1, false, texMatrix, 0);
-  }
-
-  /**
-   * Release all GLES resources. This needs to be done manually, otherwise the resources are leaked.
-   */
-  @Override
-  public void release() {
-    for (Shader shader : shaders.values()) {
-      shader.glShader.release();
-    }
-    shaders.clear();
-  }
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/GlShader.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-import android.opengl.GLES20;
-
-import java.nio.FloatBuffer;
-
-// Helper class for handling OpenGL shaders and shader programs.
-public class GlShader {
-  private static final String TAG = "GlShader";
-
-  private static int compileShader(int shaderType, String source) {
-    final int shader = GLES20.glCreateShader(shaderType);
-    if (shader == 0) {
-      throw new RuntimeException("glCreateShader() failed. GLES20 error: " + GLES20.glGetError());
-    }
-    GLES20.glShaderSource(shader, source);
-    GLES20.glCompileShader(shader);
-    int[] compileStatus = new int[] {GLES20.GL_FALSE};
-    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
-    if (compileStatus[0] != GLES20.GL_TRUE) {
-      Logging.e(
-          TAG, "Could not compile shader " + shaderType + ":" + GLES20.glGetShaderInfoLog(shader));
-      throw new RuntimeException(GLES20.glGetShaderInfoLog(shader));
-    }
-    GlUtil.checkNoGLES2Error("compileShader");
-    return shader;
-  }
-
-  private int program;
-
-  public GlShader(String vertexSource, String fragmentSource) {
-    final int vertexShader = compileShader(GLES20.GL_VERTEX_SHADER, vertexSource);
-    final int fragmentShader = compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
-    program = GLES20.glCreateProgram();
-    if (program == 0) {
-      throw new RuntimeException("glCreateProgram() failed. GLES20 error: " + GLES20.glGetError());
-    }
-    GLES20.glAttachShader(program, vertexShader);
-    GLES20.glAttachShader(program, fragmentShader);
-    GLES20.glLinkProgram(program);
-    int[] linkStatus = new int[] {GLES20.GL_FALSE};
-    GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
-    if (linkStatus[0] != GLES20.GL_TRUE) {
-      Logging.e(TAG, "Could not link program: " + GLES20.glGetProgramInfoLog(program));
-      throw new RuntimeException(GLES20.glGetProgramInfoLog(program));
-    }
-    // According to the documentation of glLinkProgram():
-    // "After the link operation, applications are free to modify attached shader objects, compile
-    // attached shader objects, detach shader objects, delete shader objects, and attach additional
-    // shader objects. None of these operations affects the information log or the program that is
-    // part of the program object."
-    // But in practice, detaching shaders from the program seems to break some devices. Deleting the
-    // shaders are fine however - it will delete them when they are no longer attached to a program.
-    GLES20.glDeleteShader(vertexShader);
-    GLES20.glDeleteShader(fragmentShader);
-    GlUtil.checkNoGLES2Error("Creating GlShader");
-  }
-
-  public int getAttribLocation(String label) {
-    if (program == -1) {
-      throw new RuntimeException("The program has been released");
-    }
-    int location = GLES20.glGetAttribLocation(program, label);
-    if (location < 0) {
-      throw new RuntimeException("Could not locate '" + label + "' in program");
-    }
-    return location;
-  }
-
-  /**
-   * Enable and upload a vertex array for attribute |label|. The vertex data is specified in
-   * |buffer| with |dimension| number of components per vertex.
-   */
-  public void setVertexAttribArray(String label, int dimension, FloatBuffer buffer) {
-    setVertexAttribArray(label, dimension, 0 /* stride */, buffer);
-  }
-
-  /**
-   * Enable and upload a vertex array for attribute |label|. The vertex data is specified in
-   * |buffer| with |dimension| number of components per vertex and specified |stride|.
-   */
-  public void setVertexAttribArray(String label, int dimension, int stride, FloatBuffer buffer) {
-    if (program == -1) {
-      throw new RuntimeException("The program has been released");
-    }
-    int location = getAttribLocation(label);
-    GLES20.glEnableVertexAttribArray(location);
-    GLES20.glVertexAttribPointer(location, dimension, GLES20.GL_FLOAT, false, stride, buffer);
-    GlUtil.checkNoGLES2Error("setVertexAttribArray");
-  }
-
-  public int getUniformLocation(String label) {
-    if (program == -1) {
-      throw new RuntimeException("The program has been released");
-    }
-    int location = GLES20.glGetUniformLocation(program, label);
-    if (location < 0) {
-      throw new RuntimeException("Could not locate uniform '" + label + "' in program");
-    }
-    return location;
-  }
-
-  public void useProgram() {
-    if (program == -1) {
-      throw new RuntimeException("The program has been released");
-    }
-    GLES20.glUseProgram(program);
-    GlUtil.checkNoGLES2Error("glUseProgram");
-  }
-
-  public void release() {
-    Logging.d(TAG, "Deleting shader.");
-    // Delete program, automatically detaching any shaders from it.
-    if (program != -1) {
-      GLES20.glDeleteProgram(program);
-      program = -1;
-    }
-  }
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/GlTextureFrameBuffer.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-import android.opengl.GLES20;
-
-/**
- * Helper class for handling OpenGL framebuffer with only color attachment and no depth or stencil
- * buffer. Intended for simple tasks such as texture copy, texture downscaling, and texture color
- * conversion.
- */
-// TODO(magjed): Add unittests for this class.
-public class GlTextureFrameBuffer {
-  private final int frameBufferId;
-  private final int textureId;
-  private final int pixelFormat;
-  private int width;
-  private int height;
-
-  /**
-   * Generate texture and framebuffer resources. An EGLContext must be bound on the current thread
-   * when calling this function. The framebuffer is not complete until setSize() is called.
-   */
-  public GlTextureFrameBuffer(int pixelFormat) {
-    switch (pixelFormat) {
-      case GLES20.GL_LUMINANCE:
-      case GLES20.GL_RGB:
-      case GLES20.GL_RGBA:
-        this.pixelFormat = pixelFormat;
-        break;
-      default:
-        throw new IllegalArgumentException("Invalid pixel format: " + pixelFormat);
-    }
-
-    // Create texture.
-    textureId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
-    this.width = 0;
-    this.height = 0;
-
-    // Create framebuffer object.
-    final int frameBuffers[] = new int[1];
-    GLES20.glGenFramebuffers(1, frameBuffers, 0);
-    frameBufferId = frameBuffers[0];
-  }
-
-  /**
-   * (Re)allocate texture. Will do nothing if the requested size equals the current size. An
-   * EGLContext must be bound on the current thread when calling this function. Must be called at
-   * least once before using the framebuffer. May be called multiple times to change size.
-   */
-  public void setSize(int width, int height) {
-    if (width == 0 || height == 0) {
-      throw new IllegalArgumentException("Invalid size: " + width + "x" + height);
-    }
-    if (width == this.width && height == this.height) {
-      return;
-    }
-    this.width = width;
-    this.height = height;
-
-    // Allocate texture.
-    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
-    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
-    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, pixelFormat, width, height, 0, pixelFormat,
-        GLES20.GL_UNSIGNED_BYTE, null);
-    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
-    GlUtil.checkNoGLES2Error("GlTextureFrameBuffer setSize");
-
-    // Attach the texture to the framebuffer as color attachment.
-    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
-    GLES20.glFramebufferTexture2D(
-        GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textureId, 0);
-
-    // Check that the framebuffer is in a good state.
-    final int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
-    if (status != GLES20.GL_FRAMEBUFFER_COMPLETE) {
-      throw new IllegalStateException("Framebuffer not complete, status: " + status);
-    }
-
-    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
-  }
-
-  public int getWidth() {
-    return width;
-  }
-
-  public int getHeight() {
-    return height;
-  }
-
-  public int getFrameBufferId() {
-    return frameBufferId;
-  }
-
-  public int getTextureId() {
-    return textureId;
-  }
-
-  /**
-   * Release texture and framebuffer. An EGLContext must be bound on the current thread when calling
-   * this function. This object should not be used after this call.
-   */
-  public void release() {
-    GLES20.glDeleteTextures(1, new int[] {textureId}, 0);
-    GLES20.glDeleteFramebuffers(1, new int[] {frameBufferId}, 0);
-    width = 0;
-    height = 0;
-  }
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/GlUtil.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-import android.opengl.GLES20;
-
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-import java.nio.FloatBuffer;
-
-/**
- * Some OpenGL static utility functions.
- */
-public class GlUtil {
-  private GlUtil() {}
-
-  // Assert that no OpenGL ES 2.0 error has been raised.
-  public static void checkNoGLES2Error(String msg) {
-    int error = GLES20.glGetError();
-    if (error != GLES20.GL_NO_ERROR) {
-      throw new RuntimeException(msg + ": GLES20 error: " + error);
-    }
-  }
-
-  public static FloatBuffer createFloatBuffer(float[] coords) {
-    // Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it.
-    ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * 4);
-    bb.order(ByteOrder.nativeOrder());
-    FloatBuffer fb = bb.asFloatBuffer();
-    fb.put(coords);
-    fb.position(0);
-    return fb;
-  }
-
-  /**
-   * Generate texture with standard parameters.
-   */
-  public static int generateTexture(int target) {
-    final int textureArray[] = new int[1];
-    GLES20.glGenTextures(1, textureArray, 0);
-    final int textureId = textureArray[0];
-    GLES20.glBindTexture(target, textureId);
-    GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
-    GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
-    GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
-    GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
-    checkNoGLES2Error("generateTexture");
-    return textureId;
-  }
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java
+++ /dev/null
@@ -1,130 +0,0 @@
-/*
- *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-import static org.webrtc.MediaCodecUtils.EXYNOS_PREFIX;
-import static org.webrtc.MediaCodecUtils.INTEL_PREFIX;
-import static org.webrtc.MediaCodecUtils.NVIDIA_PREFIX;
-import static org.webrtc.MediaCodecUtils.QCOM_PREFIX;
-
-import android.media.MediaCodecInfo;
-import android.media.MediaCodecInfo.CodecCapabilities;
-import android.media.MediaCodecList;
-import android.os.Build;
-
-/** Factory for Android hardware VideoDecoders. */
-@SuppressWarnings("deprecation") // API level 16 requires use of deprecated methods.
-public class HardwareVideoDecoderFactory implements VideoDecoderFactory {
-  private static final String TAG = "HardwareVideoDecoderFactory";
-
-  private final EglBase.Context sharedContext;
-  private final boolean fallbackToSoftware;
-
-  /** Creates a HardwareVideoDecoderFactory that does not use surface textures. */
-  @Deprecated // Not removed yet to avoid breaking callers.
-  public HardwareVideoDecoderFactory() {
-    this(null);
-  }
-
-  /**
-   * Creates a HardwareVideoDecoderFactory that supports surface texture rendering using the given
-   * shared context.  The context may be null.  If it is null, then surface support is disabled.
-   */
-  public HardwareVideoDecoderFactory(EglBase.Context sharedContext) {
-    this(sharedContext, true /* fallbackToSoftware */);
-  }
-
-  HardwareVideoDecoderFactory(EglBase.Context sharedContext, boolean fallbackToSoftware) {
-    this.sharedContext = sharedContext;
-    this.fallbackToSoftware = fallbackToSoftware;
-  }
-
-  @Override
-  public VideoDecoder createDecoder(String codecType) {
-    VideoCodecType type = VideoCodecType.valueOf(codecType);
-    MediaCodecInfo info = findCodecForType(type);
-
-    if (info == null) {
-      // No hardware support for this type.
-      // TODO(andersc): This is for backwards compatibility. Remove when clients have migrated to
-      // new DefaultVideoEncoderFactory.
-      if (fallbackToSoftware) {
-        SoftwareVideoDecoderFactory softwareVideoDecoderFactory = new SoftwareVideoDecoderFactory();
-        return softwareVideoDecoderFactory.createDecoder(codecType);
-      } else {
-        return null;
-      }
-    }
-
-    CodecCapabilities capabilities = info.getCapabilitiesForType(type.mimeType());
-    return new HardwareVideoDecoder(info.getName(), type,
-        MediaCodecUtils.selectColorFormat(MediaCodecUtils.DECODER_COLOR_FORMATS, capabilities),
-        sharedContext);
-  }
-
-  private MediaCodecInfo findCodecForType(VideoCodecType type) {
-    // HW decoding is not supported on builds before KITKAT.
-    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
-      return null;
-    }
-
-    for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
-      MediaCodecInfo info = null;
-      try {
-        info = MediaCodecList.getCodecInfoAt(i);
-      } catch (IllegalArgumentException e) {
-        Logging.e(TAG, "Cannot retrieve encoder codec info", e);
-      }
-
-      if (info == null || info.isEncoder()) {
-        continue;
-      }
-
-      if (isSupportedCodec(info, type)) {
-        return info;
-      }
-    }
-    return null; // No support for this type.
-  }
-
-  // Returns true if the given MediaCodecInfo indicates a supported encoder for the given type.
-  private boolean isSupportedCodec(MediaCodecInfo info, VideoCodecType type) {
-    if (!MediaCodecUtils.codecSupportsType(info, type)) {
-      return false;
-    }
-    // Check for a supported color format.
-    if (MediaCodecUtils.selectColorFormat(
-            MediaCodecUtils.DECODER_COLOR_FORMATS, info.getCapabilitiesForType(type.mimeType()))
-        == null) {
-      return false;
-    }
-    return isHardwareSupported(info, type);
-  }
-
-  private boolean isHardwareSupported(MediaCodecInfo info, VideoCodecType type) {
-    String name = info.getName();
-    switch (type) {
-      case VP8:
-        // QCOM, Intel, Exynos, and Nvidia all supported for VP8.
-        return name.startsWith(QCOM_PREFIX) || name.startsWith(INTEL_PREFIX)
-            || name.startsWith(EXYNOS_PREFIX) || name.startsWith(NVIDIA_PREFIX);
-      case VP9:
-        // QCOM and Exynos supported for VP9.
-        return name.startsWith(QCOM_PREFIX) || name.startsWith(EXYNOS_PREFIX);
-      case H264:
-        // QCOM, Intel, and Exynos supported for H264.
-        return name.startsWith(QCOM_PREFIX) || name.startsWith(INTEL_PREFIX)
-            || name.startsWith(EXYNOS_PREFIX);
-      default:
-        return false;
-    }
-  }
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java
+++ /dev/null
@@ -1,285 +0,0 @@
-/*
- *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-import static org.webrtc.MediaCodecUtils.EXYNOS_PREFIX;
-import static org.webrtc.MediaCodecUtils.INTEL_PREFIX;
-import static org.webrtc.MediaCodecUtils.QCOM_PREFIX;
-
-import android.media.MediaCodecInfo;
-import android.media.MediaCodecList;
-import android.os.Build;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/** Factory for android hardware video encoders. */
-@SuppressWarnings("deprecation") // API 16 requires the use of deprecated methods.
-public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
-  private static final String TAG = "HardwareVideoEncoderFactory";
-
-  // Forced key frame interval - used to reduce color distortions on Qualcomm platforms.
-  private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_L_MS = 15000;
-  private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS = 20000;
-  private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS = 15000;
-
-  // List of devices with poor H.264 encoder quality.
-  // HW H.264 encoder on below devices has poor bitrate control - actual
-  // bitrates deviates a lot from the target value.
-  private static final List<String> H264_HW_EXCEPTION_MODELS =
-      Arrays.asList("SAMSUNG-SGH-I337", "Nexus 7", "Nexus 4");
-
-  private final EglBase14.Context sharedContext;
-  private final boolean enableIntelVp8Encoder;
-  private final boolean enableH264HighProfile;
-  private final boolean fallbackToSoftware;
-
-  public HardwareVideoEncoderFactory(
-      EglBase.Context sharedContext, boolean enableIntelVp8Encoder, boolean enableH264HighProfile) {
-    this(
-        sharedContext, enableIntelVp8Encoder, enableH264HighProfile, true /* fallbackToSoftware */);
-  }
-
-  HardwareVideoEncoderFactory(EglBase.Context sharedContext, boolean enableIntelVp8Encoder,
-      boolean enableH264HighProfile, boolean fallbackToSoftware) {
-    // Texture mode requires EglBase14.
-    if (sharedContext instanceof EglBase14.Context) {
-      this.sharedContext = (EglBase14.Context) sharedContext;
-    } else {
-      Logging.w(TAG, "No shared EglBase.Context.  Encoders will not use texture mode.");
-      this.sharedContext = null;
-    }
-    this.enableIntelVp8Encoder = enableIntelVp8Encoder;
-    this.enableH264HighProfile = enableH264HighProfile;
-    this.fallbackToSoftware = fallbackToSoftware;
-  }
-
-  @Deprecated
-  public HardwareVideoEncoderFactory(boolean enableIntelVp8Encoder, boolean enableH264HighProfile) {
-    this(null, enableIntelVp8Encoder, enableH264HighProfile);
-  }
-
-  @Override
-  public VideoEncoder createEncoder(VideoCodecInfo input) {
-    VideoCodecType type = VideoCodecType.valueOf(input.name);
-    MediaCodecInfo info = findCodecForType(type);
-
-    if (info == null) {
-      // No hardware support for this type.
-      // TODO(andersc): This is for backwards compatibility. Remove when clients have migrated to
-      // new DefaultVideoEncoderFactory.
-      if (fallbackToSoftware) {
-        SoftwareVideoEncoderFactory softwareVideoEncoderFactory = new SoftwareVideoEncoderFactory();
-        return softwareVideoEncoderFactory.createEncoder(input);
-      } else {
-        return null;
-      }
-    }
-
-    String codecName = info.getName();
-    String mime = type.mimeType();
-    Integer surfaceColorFormat = MediaCodecUtils.selectColorFormat(
-        MediaCodecUtils.TEXTURE_COLOR_FORMATS, info.getCapabilitiesForType(mime));
-    Integer yuvColorFormat = MediaCodecUtils.selectColorFormat(
-        MediaCodecUtils.ENCODER_COLOR_FORMATS, info.getCapabilitiesForType(mime));
-
-    if (type == VideoCodecType.H264) {
-      boolean isHighProfile = isSameH264Profile(input.params, getCodecProperties(type, true))
-          && isH264HighProfileSupported(info);
-      boolean isBaselineProfile = isSameH264Profile(input.params, getCodecProperties(type, false));
-
-      if (!isHighProfile && !isBaselineProfile) {
-        return null;
-      }
-    }
-
-    return new HardwareVideoEncoder(codecName, type, surfaceColorFormat, yuvColorFormat,
-        input.params, getKeyFrameIntervalSec(type), getForcedKeyFrameIntervalMs(type, codecName),
-        createBitrateAdjuster(type, codecName), sharedContext);
-  }
-
-  @Override
-  public VideoCodecInfo[] getSupportedCodecs() {
-    List<VideoCodecInfo> supportedCodecInfos = new ArrayList<VideoCodecInfo>();
-    // Generate a list of supported codecs in order of preference:
-    // VP8, VP9, H264 (high profile), and H264 (baseline profile).
-    for (VideoCodecType type :
-        new VideoCodecType[] {VideoCodecType.VP8, VideoCodecType.VP9, VideoCodecType.H264}) {
-      MediaCodecInfo codec = findCodecForType(type);
-      if (codec != null) {
-        String name = type.name();
-        if (type == VideoCodecType.H264 && isH264HighProfileSupported(codec)) {
-          supportedCodecInfos.add(new VideoCodecInfo(name, getCodecProperties(type, true)));
-        }
-
-        supportedCodecInfos.add(new VideoCodecInfo(name, getCodecProperties(type, false)));
-      }
-    }
-
-    // TODO(andersc): This is for backwards compatibility. Remove when clients have migrated to
-    // new DefaultVideoEncoderFactory.
-    if (fallbackToSoftware) {
-      for (VideoCodecInfo info : SoftwareVideoEncoderFactory.supportedCodecs()) {
-        if (!supportedCodecInfos.contains(info)) {
-          supportedCodecInfos.add(info);
-        }
-      }
-    }
-
-    return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]);
-  }
-
-  private MediaCodecInfo findCodecForType(VideoCodecType type) {
-    for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
-      MediaCodecInfo info = null;
-      try {
-        info = MediaCodecList.getCodecInfoAt(i);
-      } catch (IllegalArgumentException e) {
-        Logging.e(TAG, "Cannot retrieve encoder codec info", e);
-      }
-
-      if (info == null || !info.isEncoder()) {
-        continue;
-      }
-
-      if (isSupportedCodec(info, type)) {
-        return info;
-      }
-    }
-    return null; // No support for this type.
-  }
-
-  // Returns true if the given MediaCodecInfo indicates a supported encoder for the given type.
-  private boolean isSupportedCodec(MediaCodecInfo info, VideoCodecType type) {
-    if (!MediaCodecUtils.codecSupportsType(info, type)) {
-      return false;
-    }
-    // Check for a supported color format.
-    if (MediaCodecUtils.selectColorFormat(
-            MediaCodecUtils.ENCODER_COLOR_FORMATS, info.getCapabilitiesForType(type.mimeType()))
-        == null) {
-      return false;
-    }
-    return isHardwareSupportedInCurrentSdk(info, type);
-  }
-
-  // Returns true if the given MediaCodecInfo indicates a hardware module that is supported on the
-  // current SDK.
-  private boolean isHardwareSupportedInCurrentSdk(MediaCodecInfo info, VideoCodecType type) {
-    switch (type) {
-      case VP8:
-        return isHardwareSupportedInCurrentSdkVp8(info);
-      case VP9:
-        return isHardwareSupportedInCurrentSdkVp9(info);
-      case H264:
-        return isHardwareSupportedInCurrentSdkH264(info);
-    }
-    return false;
-  }
-
-  private boolean isHardwareSupportedInCurrentSdkVp8(MediaCodecInfo info) {
-    String name = info.getName();
-    // QCOM Vp8 encoder is supported in KITKAT or later.
-    return (name.startsWith(QCOM_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT)
-        // Exynos VP8 encoder is supported in M or later.
-        || (name.startsWith(EXYNOS_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.M)
-        // Intel Vp8 encoder is supported in LOLLIPOP or later, with the intel encoder enabled.
-        || (name.startsWith(INTEL_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP
-               && enableIntelVp8Encoder);
-  }
-
-  private boolean isHardwareSupportedInCurrentSdkVp9(MediaCodecInfo info) {
-    String name = info.getName();
-    return (name.startsWith(QCOM_PREFIX) || name.startsWith(EXYNOS_PREFIX))
-        // Both QCOM and Exynos VP9 encoders are supported in N or later.
-        && Build.VERSION.SDK_INT >= Build.VERSION_CODES.N;
-  }
-
-  private boolean isHardwareSupportedInCurrentSdkH264(MediaCodecInfo info) {
-    // First, H264 hardware might perform poorly on this model.
-    if (H264_HW_EXCEPTION_MODELS.contains(Build.MODEL)) {
-      return false;
-    }
-    String name = info.getName();
-    // QCOM H264 encoder is supported in KITKAT or later.
-    return (name.startsWith(QCOM_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT)
-        // Exynos H264 encoder is supported in LOLLIPOP or later.
-        || (name.startsWith(EXYNOS_PREFIX)
-               && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP);
-  }
-
-  private int getKeyFrameIntervalSec(VideoCodecType type) {
-    switch (type) {
-      case VP8: // Fallthrough intended.
-      case VP9:
-        return 100;
-      case H264:
-        return 20;
-    }
-    throw new IllegalArgumentException("Unsupported VideoCodecType " + type);
-  }
-
-  private int getForcedKeyFrameIntervalMs(VideoCodecType type, String codecName) {
-    if (type == VideoCodecType.VP8 && codecName.startsWith(QCOM_PREFIX)) {
-      if (Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP
-          || Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP_MR1) {
-        return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_L_MS;
-      } else if (Build.VERSION.SDK_INT == Build.VERSION_CODES.M) {
-        return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS;
-      } else if (Build.VERSION.SDK_INT > Build.VERSION_CODES.M) {
-        return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS;
-      }
-    }
-    // Other codecs don't need key frame forcing.
-    return 0;
-  }
-
-  private BitrateAdjuster createBitrateAdjuster(VideoCodecType type, String codecName) {
-    if (codecName.startsWith(EXYNOS_PREFIX)) {
-      if (type == VideoCodecType.VP8) {
-        // Exynos VP8 encoders need dynamic bitrate adjustment.
-        return new DynamicBitrateAdjuster();
-      } else {
-        // Exynos VP9 and H264 encoders need framerate-based bitrate adjustment.
-        return new FramerateBitrateAdjuster();
-      }
-    }
-    // Other codecs don't need bitrate adjustment.
-    return new BaseBitrateAdjuster();
-  }
-
-  private boolean isH264HighProfileSupported(MediaCodecInfo info) {
-    return enableH264HighProfile && info.getName().startsWith(QCOM_PREFIX);
-  }
-
-  private Map<String, String> getCodecProperties(VideoCodecType type, boolean highProfile) {
-    switch (type) {
-      case VP8:
-      case VP9:
-        return new HashMap<String, String>();
-      case H264:
-        Map<String, String> properties = new HashMap<>();
-        properties.put(VideoCodecInfo.H264_FMTP_LEVEL_ASYMMETRY_ALLOWED, "1");
-        properties.put(VideoCodecInfo.H264_FMTP_PACKETIZATION_MODE, "1");
-        properties.put(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID,
-            highProfile ? VideoCodecInfo.H264_CONSTRAINED_HIGH_3_1
-                        : VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1);
-        return properties;
-      default:
-        throw new IllegalArgumentException("Unsupported codec: " + type);
-    }
-  }
-
-  private static native boolean isSameH264Profile(
-      Map<String, String> params1, Map<String, String> params2);
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/IceCandidate.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- *  Copyright 2013 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-/**
- * Representation of a single ICE Candidate, mirroring
- * {@code IceCandidateInterface} in the C++ API.
- */
-public class IceCandidate {
-  public final String sdpMid;
-  public final int sdpMLineIndex;
-  public final String sdp;
-  public final String serverUrl;
-
-  public IceCandidate(String sdpMid, int sdpMLineIndex, String sdp) {
-    this.sdpMid = sdpMid;
-    this.sdpMLineIndex = sdpMLineIndex;
-    this.sdp = sdp;
-    this.serverUrl = "";
-  }
-
-  @CalledByNative
-  IceCandidate(String sdpMid, int sdpMLineIndex, String sdp, String serverUrl) {
-    this.sdpMid = sdpMid;
-    this.sdpMLineIndex = sdpMLineIndex;
-    this.sdp = sdp;
-    this.serverUrl = serverUrl;
-  }
-
-  @Override
-  public String toString() {
-    return sdpMid + ":" + sdpMLineIndex + ":" + sdp + ":" + serverUrl;
-  }
-
-  @CalledByNative
-  String getSdpMid() {
-    return sdpMid;
-  }
-
-  @CalledByNative
-  String getSdp() {
-    return sdp;
-  }
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/JavaI420Buffer.java
+++ /dev/null
@@ -1,177 +0,0 @@
-/*
- *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-import java.nio.ByteBuffer;
-import org.webrtc.VideoFrame.I420Buffer;
-
-/** Implementation of VideoFrame.I420Buffer backed by Java direct byte buffers. */
-public class JavaI420Buffer implements VideoFrame.I420Buffer {
-  private final int width;
-  private final int height;
-  private final ByteBuffer dataY;
-  private final ByteBuffer dataU;
-  private final ByteBuffer dataV;
-  private final int strideY;
-  private final int strideU;
-  private final int strideV;
-  private final Runnable releaseCallback;
-  private final Object refCountLock = new Object();
-
-  private int refCount;
-
-  private JavaI420Buffer(int width, int height, ByteBuffer dataY, int strideY, ByteBuffer dataU,
-      int strideU, ByteBuffer dataV, int strideV, Runnable releaseCallback) {
-    this.width = width;
-    this.height = height;
-    this.dataY = dataY;
-    this.dataU = dataU;
-    this.dataV = dataV;
-    this.strideY = strideY;
-    this.strideU = strideU;
-    this.strideV = strideV;
-    this.releaseCallback = releaseCallback;
-
-    this.refCount = 1;
-  }
-
-  /** Wraps existing ByteBuffers into JavaI420Buffer object without copying the contents. */
-  public static JavaI420Buffer wrap(int width, int height, ByteBuffer dataY, int strideY,
-      ByteBuffer dataU, int strideU, ByteBuffer dataV, int strideV, Runnable releaseCallback) {
-    if (dataY == null || dataU == null || dataV == null) {
-      throw new IllegalArgumentException("Data buffers cannot be null.");
-    }
-    if (!dataY.isDirect() || !dataU.isDirect() || !dataV.isDirect()) {
-      throw new IllegalArgumentException("Data buffers must be direct byte buffers.");
-    }
-
-    // Slice the buffers to prevent external modifications to the position / limit of the buffer.
-    // Note that this doesn't protect the contents of the buffers from modifications.
-    dataY = dataY.slice();
-    dataU = dataU.slice();
-    dataV = dataV.slice();
-
-    final int chromaHeight = (height + 1) / 2;
-    final int minCapacityY = strideY * height;
-    final int minCapacityU = strideU * chromaHeight;
-    final int minCapacityV = strideV * chromaHeight;
-    if (dataY.capacity() < minCapacityY) {
-      throw new IllegalArgumentException("Y-buffer must be at least " + minCapacityY + " bytes.");
-    }
-    if (dataU.capacity() < minCapacityU) {
-      throw new IllegalArgumentException("U-buffer must be at least " + minCapacityU + " bytes.");
-    }
-    if (dataV.capacity() < minCapacityV) {
-      throw new IllegalArgumentException("V-buffer must be at least " + minCapacityV + " bytes.");
-    }
-
-    return new JavaI420Buffer(
-        width, height, dataY, strideY, dataU, strideU, dataV, strideV, releaseCallback);
-  }
-
-  /** Allocates an empty I420Buffer suitable for an image of the given dimensions. */
-  public static JavaI420Buffer allocate(int width, int height) {
-    int chromaHeight = (height + 1) / 2;
-    int strideUV = (width + 1) / 2;
-    int yPos = 0;
-    int uPos = yPos + width * height;
-    int vPos = uPos + strideUV * chromaHeight;
-
-    ByteBuffer buffer = ByteBuffer.allocateDirect(width * height + 2 * strideUV * chromaHeight);
-
-    buffer.position(yPos);
-    buffer.limit(uPos);
-    ByteBuffer dataY = buffer.slice();
-
-    buffer.position(uPos);
-    buffer.limit(vPos);
-    ByteBuffer dataU = buffer.slice();
-
-    buffer.position(vPos);
-    buffer.limit(vPos + strideUV * chromaHeight);
-    ByteBuffer dataV = buffer.slice();
-
-    return new JavaI420Buffer(
-        width, height, dataY, width, dataU, strideUV, dataV, strideUV, null /* releaseCallback */);
-  }
-
-  @Override
-  public int getWidth() {
-    return width;
-  }
-
-  @Override
-  public int getHeight() {
-    return height;
-  }
-
-  @Override
-  public ByteBuffer getDataY() {
-    // Return a slice to prevent relative reads from changing the position.
-    return dataY.slice();
-  }
-
-  @Override
-  public ByteBuffer getDataU() {
-    // Return a slice to prevent relative reads from changing the position.
-    return dataU.slice();
-  }
-
-  @Override
-  public ByteBuffer getDataV() {
-    // Return a slice to prevent relative reads from changing the position.
-    return dataV.slice();
-  }
-
-  @Override
-  public int getStrideY() {
-    return strideY;
-  }
-
-  @Override
-  public int getStrideU() {
-    return strideU;
-  }
-
-  @Override
-  public int getStrideV() {
-    return strideV;
-  }
-
-  @Override
-  public I420Buffer toI420() {
-    retain();
-    return this;
-  }
-
-  @Override
-  public void retain() {
-    synchronized (refCountLock) {
-      ++refCount;
-    }
-  }
-
-  @Override
-  public void release() {
-    synchronized (refCountLock) {
-      if (--refCount == 0 && releaseCallback != null) {
-        releaseCallback.run();
-      }
-    }
-  }
-
-  @Override
-  public VideoFrame.Buffer cropAndScale(
-      int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
-    return VideoFrame.cropAndScaleI420(
-        this, cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight);
-  }
-}
deleted file mode 100644
--- a/media/webrtc/trunk/webrtc/sdk/android/api/org/webrtc/MediaCodecVideoDecoder.java
+++ /dev/null
@@ -1,751 +0,0 @@
-/*
- *  Copyright 2014 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-import android.media.MediaCodec;
-import android.media.MediaCodecInfo;
-import android.media.MediaCodecInfo.CodecCapabilities;
-import android.media.MediaCodecList;
-import android.media.MediaFormat;
-import android.os.Build;
-import android.os.SystemClock;
-import android.view.Surface;
-import java.nio.ByteBuffer;
-import java.util.ArrayDeque;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Queue;
-import java.util.Set;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.TimeUnit;
-
-// Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
-// This class is an implementation detail of the Java PeerConnection API.
-@SuppressWarnings("deprecation")
-public class MediaCodecVideoDecoder {
-  // This class is constructed, operated, and destroyed by its C++ incarnation,
-  // so the class and its methods have non-public visibility.  The API this
-  // class exposes aims to mimic the webrtc::VideoDecoder API as closely as
-  // possibly to minimize the amount of translation work necessary.
-
-  private static final String TAG = "MediaCodecVideoDecoder";
-  private static final long MAX_DECODE_TIME_MS = 200;
-
-  // TODO(magjed): Use MediaFormat constants when part of the public API.
-  private static final String FORMAT_KEY_STRIDE = "stride";
-  private static final String FORMAT_KEY_SLICE_HEIGHT = "slice-height";
-  private static final String FORMAT_KEY_CROP_LEFT = "crop-left";
-  private static final String FORMAT_KEY_CROP_RIGHT = "crop-right";
-  private static final String FORMAT_KEY_CROP_TOP = "crop-top";
-  private static final String FORMAT_KEY_CROP_BOTTOM = "crop-bottom";
-
-  // Tracks webrtc::VideoCodecType.
-  public enum VideoCodecType { VIDEO_CODEC_VP8, VIDEO_CODEC_VP9, VIDEO_CODEC_H264 }
-
-  // Timeout for input buffer dequeue.
-  private static final int DEQUEUE_INPUT_TIMEOUT = 500000;
-  // Timeout for codec releasing.
-  private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
-  // Max number of output buffers queued before starting to drop decoded frames.
-  private static final int MAX_QUEUED_OUTPUTBUFFERS = 3;
-  // Active running decoder instance. Set in initDecode() (called from native code)
-  // and reset to null in release() call.
-  private static MediaCodecVideoDecoder runningInstance = null;
-  private static MediaCodecVideoDecoderErrorCallback errorCallback = null;
-  private static int codecErrors = 0;
-  // List of disabled codec types - can be set from application.
-  private static Set<String> hwDecoderDisabledTypes = new HashSet<String>();
-
-  private Thread mediaCodecThread;
-  private MediaCodec mediaCodec;
-  private ByteBuffer[] inputBuffers;
-  private ByteBuffer[] outputBuffers;
-  private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
-  private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
-  private static final String H264_MIME_TYPE = "video/avc";
-  // List of supported HW VP8 decoders.
-  private static final String[] supportedVp8HwCodecPrefixes = {
-      "OMX.qcom.", "OMX.Nvidia.", "OMX.Exynos.", "OMX.Intel."};
-  // List of supported HW VP9 decoders.
-  private static final String[] supportedVp9HwCodecPrefixes = {"OMX.qcom.", "OMX.Exynos."};
-  // List of supported HW H.264 decoders.
-  private static final String[] supportedH264HwCodecPrefixes = {
-      "OMX.qcom.", "OMX.Intel.", "OMX.Exynos."};
-  // List of supported HW H.264 high profile decoders.
-  private static final String supportedQcomH264HighProfileHwCodecPrefix = "OMX.qcom.";
-  private static final String supportedExynosH264HighProfileHwCodecPrefix = "OMX.Exynos.";
-
-  // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
-  // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
-  private static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka = 0x7FA30C01;
-  private static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka = 0x7FA30C02;
-  private static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka = 0x7FA30C03;
-  private static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
-  // Allowable color formats supported by codec - in order of preference.
-  private static final List<Integer> supportedColorList = Arrays.asList(
-      CodecCapabilities.COLOR_FormatYUV420Planar, CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
-      CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
-      COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka, COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka,
-      COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka,
-      COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m);
-
-  private int colorFormat;
-  private int width;
-  private int height;
-  private int stride;
-  private int sliceHeight;
-  private boolean hasDecodedFirstFrame;
-  private final Queue<TimeStamps> decodeStartTimeMs = new ArrayDeque<TimeStamps>();
-  private boolean useSurface;
-
-  // The below variables are only used when decoding to a Surface.
-  private TextureListener textureListener;
-  private int droppedFrames;
-  private Surface surface = null;
-  private final Queue<DecodedOutputBuffer> dequeuedSurfaceOutputBuffers =
-      new ArrayDeque<DecodedOutputBuffer>();
-
-  // MediaCodec error handler - invoked when critical error happens which may prevent
-  // further use of media codec API. Now it means that one of media codec instances
-  // is hanging and can no longer be used in the next call.
-  public static interface MediaCodecVideoDecoderErrorCallback {
-    void onMediaCodecVideoDecoderCriticalError(int codecErrors);
-  }
-
-  public static void setErrorCallback(MediaCodecVideoDecoderErrorCallback errorCallback) {
-    Logging.d(TAG, "Set error callback");
-    MediaCodecVideoDecoder.errorCallback = errorCallback;
-  }
-
-  // Functions to disable HW decoding - can be called from applications for platforms
-  // which have known HW decoding problems.
-  public static void disableVp8HwCodec() {
-    Logging.w(TAG, "VP8 decoding is disabled by application.");
-    hwDecoderDisabledTypes.add(VP8_MIME_TYPE);
-  }
-
-  public static void disableVp9HwCodec() {
-    Logging.w(TAG, "VP9 decoding is disabled by application.");
-    hwDecoderDisabledTypes.add(VP9_MIME_TYPE);
-  }
-
-  public static void disableH264HwCodec() {
-    Logging.w(TAG, "H.264 decoding is disabled by application.");
-    hwDecoderDisabledTypes.add(H264_MIME_TYPE);
-  }
-
-  // Functions to query if HW decoding is supported.
-  public static boolean isVp8HwSupported() {
-    return !hwDecoderDisabledTypes.contains(VP8_MIME_TYPE)
-        && (findDecoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null);
-  }
-
-  public static boolean isVp9HwSupported() {
-    return !hwDecoderDisabledTypes.contains(VP9_MIME_TYPE)
-        && (findDecoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes) != null);
-  }
-
-  public static boolean isH264HwSupported() {
-    return !hwDecoderDisabledTypes.contains(H264_MIME_TYPE)
-        && (findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null);
-  }
-
-  public static boolean isH264HighProfileHwSupported() {
-    if (hwDecoderDisabledTypes.contains(H264_MIME_TYPE)) {
-      return false;
-    }
-    // Support H.264 HP decoding on QCOM chips for Android L and above.
-    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP
-        && findDecoder(H264_MIME_TYPE, new String[] {supportedQcomH264HighProfileHwCodecPrefix})
-            != null) {
-      return true;
-    }
-    // Support H.264 HP decoding on Exynos chips for Android M and above.
-    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M
-        && findDecoder(H264_MIME_TYPE, new String[] {supportedExynosH264HighProfileHwCodecPrefix})
-            != null) {
-      return true;
-    }
-    return false;
-  }
-
-  public static void printStackTrace() {
-    if (runningInstance != null && runningInstance.mediaCodecThread != null) {
-      StackTraceElement[] mediaCodecStackTraces = runningInstance.mediaCodecThread.getStackTrace();
-      if (mediaCodecStackTraces.length > 0) {
-        Logging.d(TAG, "MediaCodecVideoDecoder stacks trace:");
-        for (StackTraceElement stackTrace : mediaCodecStackTraces) {
-          Logging.d(TAG, stackTrace.toString());
-        }
-      }
-    }
-  }
-
-  // Helper struct for findDecoder() below.
-  private static class DecoderProperties {
-    public DecoderProperties(String codecName, int colorFormat) {
-      this.codecName = codecName;
-      this.colorFormat = colorFormat;
-    }
-    public final String codecName; // OpenMax component name for VP8 codec.
-    public final int colorFormat; // Color format supported by codec.
-  }
-
-  private static DecoderProperties findDecoder(String mime, String[] supportedCodecPrefixes) {
-    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
-      return null; // MediaCodec.setParameters is missing.
-    }
-    Logging.d(TAG, "Trying to find HW decoder for mime " + mime);
-    for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
-      MediaCodecInfo info = null;
-      try {
-        info = MediaCodecList.getCodecInfoAt(i);
-      } catch (IllegalArgumentException e) {
-        Logging.e(TAG, "Cannot retrieve decoder codec info", e);
-      }
-      if (info == null || info.isEncoder()) {
-        continue;
-      }
-      String name = null;
-      for (String mimeType : info.getSupportedTypes()) {
-        if (mimeType.equals(mime)) {
-          name = info.getName();
-          break;
-        }
-      }
-      if (name == null) {
-        continue; // No HW support in this codec; try the next one.
-      }
-      Logging.d(TAG, "Found candidate decoder " + name);
-
-      // Check if this is supported decoder.
-      boolean supportedCodec = false;
-      for (String codecPrefix : supportedCodecPrefixes) {
-        if (name.startsWith(codecPrefix)) {
-          supportedCodec = true;
-          break;
-        }
-      }
-      if (!supportedCodec) {
-        continue;
-      }
-
-      // Check if codec supports either yuv420 or nv12.
-      CodecCapabilities capabilities;
-      try {
-        capabilities = info.getCapabilitiesForType(mime);
-      } catch (IllegalArgumentException e) {
-        Logging.e(TAG, "Cannot retrieve decoder capabilities", e);
-        continue;
-      }
-      for (int colorFormat : capabilities.colorFormats) {
-        Logging.v(TAG, "   Color: 0x" + Integer.toHexString(colorFormat));
-      }
-      for (int supportedColorFormat : supportedColorList) {
-        for (int codecColorFormat : capabilities.colorFormats) {
-          if (codecColorFormat == supportedColorFormat) {
-            // Found supported HW decoder.
-            Logging.d(TAG, "Found target decoder " + name + ". Color: 0x"
-                    + Integer.toHexString(codecColorFormat));
-            return new DecoderProperties(name, codecColorFormat);
-          }
-        }
-      }
-    }
-    Logging.d(TAG, "No HW decoder found for mime " + mime);
-    return null; // No HW decoder.
-  }
-
-  private void checkOnMediaCodecThread() throws IllegalStateException {
-    if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
-      throw new IllegalStateException("MediaCodecVideoDecoder previously operated on "
-          + mediaCodecThread + " but is now called on " + Thread.currentThread());
-    }
-  }
-
-  // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
-  private boolean initDecode(
-      VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
-    if (mediaCodecThread != null) {
-      throw new RuntimeException("initDecode: Forgot to release()?");
-    }
-
-    String mime = null;
-    useSurface = (surfaceTextureHelper != null);
-    String[] supportedCodecPrefixes = null;
-    if (type == VideoCodecType.VIDEO_CODEC_VP8) {
-      mime = VP8_MIME_TYPE;
-      supportedCodecPrefixes = supportedVp8HwCodecPrefixes;
-    } else if (type == VideoCodecType.VIDEO_CODEC_VP9) {
-      mime = VP9_MIME_TYPE;
-      supportedCodecPrefixes = supportedVp9HwCodecPrefixes;
-    } else if (type == VideoCodecType.VIDEO_CODEC_H264) {
-      mime = H264_MIME_TYPE;
-      supportedCodecPrefixes = supportedH264HwCodecPrefixes;
-    } else {
-      throw new RuntimeException("initDecode: Non-supported codec " + type);
-    }
-    DecoderProperties properties = findDecoder(mime, supportedCodecPrefixes);
-    if (properties == null) {
-      throw new RuntimeException("Cannot find HW decoder for " + type);
-    }
-
-    Logging.d(TAG, "Java initDecode: " + type + " : " + width + " x " + height + ". Color: 0x"
-            + Integer.toHexString(properties.colorFormat) + ". Use Surface: " + useSurface);
-
-    runningInstance = this; // Decoder is now running and can be queried for stack traces.
-    mediaCodecThread = Thread.currentThread();
-    try {
-      this.width = width;
-      this.height = height;
-      stride = width;
-      sliceHeight = height;
-
-      if (useSurface) {
-        textureListener = new TextureListener(surfaceTextureHelper);
-        surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
-      }
-
-      MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
-      if (!useSurface) {
-        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
-      }
-      Logging.d(TAG, "  Format: " + format);
-      mediaCodec = MediaCodecVideoEncoder.createByCodecName(properties.codecName);
-      if (mediaCodec == null) {
-        Logging.e(TAG, "Can not create media decoder");
-        return false;
-      }
-      mediaCodec.configure(format, surface, null, 0);
-      mediaCodec.start();
-
-      colorFormat = properties.colorFormat;
-      outputBuffers = mediaCodec.getOutputBuffers();
-      inputBuffers = mediaCodec.getInputBuffers();
-      decodeStartTimeMs.clear();
-      hasDecodedFirstFrame = false;
-      dequeuedSurfaceOutputBuffers.clear();
-      droppedFrames = 0;
-      Logging.d(TAG,
-          "Input buffers: " + inputBuffers.length + ". Output buffers: " + outputBuffers.length);
-      return true;
-    } catch (IllegalStateException e) {
-      Logging.e(TAG, "initDecode failed", e);
-      return false;
-    }
-  }
-
-  // Resets the decoder so it can start decoding frames with new resolution.
-  // Flushes MediaCodec and clears decoder output buffers.
-  private void reset(int width, int height) {
-    if (mediaCodecThread == null || mediaCodec == null) {
-      throw new RuntimeException("Incorrect reset call for non-initialized decoder.");
-    }
-    Logging.d(TAG, "Java reset: " + width + " x " + height);
-
-    mediaCodec.flush();
-
-    this.width = width;
-    this.height = height;
-    decodeStartTimeMs.clear();
-    dequeuedSurfaceOutputBuffers.clear();
-    hasDecodedFirstFrame = false;
-    droppedFrames = 0;
-  }
-
-  private void release() {
-    Logging.d(TAG, "Java releaseDecoder. Total number of dropped frames: " + droppedFrames);
-    checkOnMediaCodecThread();
-
-    // Run Mediacodec stop() and release() on separate thread since sometime
-    // Mediacodec.stop() may hang.
-    final CountDownLatch releaseDone = new CountDownLatch(1);
-
-    Runnable runMediaCodecRelease = new Runnable() {
-      @Override
-      public void run() {
-        try {
-          Logging.d(TAG, "Java releaseDecoder on release thread");
-          mediaCodec.stop();
-          mediaCodec.release();
-          Logging.d(TAG, "Java releaseDecoder on release thread done");
-        } catch (Exception e) {
-          Logging.e(TAG, "Media decoder release failed", e);
-        }
-        releaseDone.countDown();
-      }
-    };
-    new Thread(runMediaCodecRelease).start();
-
-    if (!ThreadUtils.awaitUninterruptibly(releaseDone, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
-      Logging.e(TAG, "Media decoder release timeout");
-      codecErrors++;
-      if (errorCallback != null) {
-        Logging.e(TAG, "Invoke codec error callback. Errors: " + codecErrors);
-        errorCallback.onMediaCodecVideoDecoderCriticalError(codecErrors);
-      }
-    }
-
-    mediaCodec = null;
-    mediaCodecThread = null;
-    runningInstance = null;
-    if (useSurface) {
-      surface.release();
-      surface = null;
-      textureListener.release();
-    }
-    Logging.d(TAG, "Java releaseDecoder done");
-  }
-
-  // Dequeue an input buffer and return its index, -1 if no input buffer is
-  // available, or -2 if the codec is no longer operative.
-  private int dequeueInputBuffer() {
-    checkOnMediaCodecThread();
-    try {
-      return mediaCodec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT);
-    } catch (IllegalStateException e) {
-      Logging.e(TAG, "dequeueIntputBuffer failed", e);
-      return -2;
-    }
-  }
-
-  private boolean queueInputBuffer(int inputBufferIndex, int size, long presentationTimeStamUs,
-      long timeStampMs, long ntpTimeStamp) {
-    checkOnMediaCodecThread();
-    try {
-      inputBuffers[inputBufferIndex].position(0);
-      inputBuffers[inputBufferIndex].limit(size);
-      decodeStartTimeMs.add(
-          new TimeStamps(SystemClock.elapsedRealtime(), timeStampMs, ntpTimeStamp));
-      mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, presentationTimeStamUs, 0);
-      return true;
-    } catch (IllegalStateException e) {
-      Logging.e(TAG, "decode failed", e);
-      return false;
-    }
-  }
-
-  private static class TimeStamps {
-    public TimeStamps(long decodeStartTimeMs, long timeStampMs, long ntpTimeStampMs) {
-      this.decodeStartTimeMs = decodeStartTimeMs;
-      this.timeStampMs = timeStampMs;
-      this.ntpTimeStampMs = ntpTimeStampMs;
-    }
-    // Time when this frame was queued for decoding.
-    private final long decodeStartTimeMs;
-    // Only used for bookkeeping in Java. Stores C++ inputImage._timeStamp value for input frame.
-    private final long timeStampMs;
-    // Only used for bookkeeping in Java. Stores C++ inputImage.ntp_time_ms_ value for input frame.
-    private final long ntpTimeStampMs;
-  }
-
-  // Helper struct for dequeueOutputBuffer() below.
-  private static class DecodedOutputBuffer {
-    public DecodedOutputBuffer(int index, int offset, int size, long presentationTimeStampMs,
-        long timeStampMs, long ntpTimeStampMs, long decodeTime, long endDecodeTime) {
-      this.index = index;
-      this.offset = offset;
-      this.size = size;
-      this.presentationTimeStampMs = presentationTimeStampMs;
-      this.timeStampMs = timeStampMs;
-      this.ntpTimeStampMs = ntpTimeStampMs;
-      this.decodeTimeMs = decodeTime;
-      this.endDecodeTimeMs = endDecodeTime;
-    }
-
-    private final int index;
-    private final int offset;
-    private final int size;
-    // Presentation timestamp returned in dequeueOutputBuffer call.
-    private final long presentationTimeStampMs;
-    // C++ inputImage._timeStamp value for output frame.
-    private final long timeStampMs;
-    // C++ inputImage.ntp_time_ms_ value for output frame.
-    private final long ntpTimeStampMs;
-    // Number of ms it took to decode this frame.
-    private final long decodeTimeMs;
-    // System time when this frame decoding finished.
-    private final long endDecodeTimeMs;
-  }
-
-  // Helper struct for dequeueTextureBuffer() below.
-  private static class DecodedTextureBuffer {
-    private final int textureID;
-    private final float[] transformMatrix;
-    // Presentation timestamp returned in dequeueOutputBuffer call.
-    private final long presentationTimeStampMs;
-    // C++ inputImage._timeStamp value for output frame.
-    private final long timeStampMs;
-    // C++ inputImage.ntp_time_ms_ value for output frame.
-    private final long ntpTimeStampMs;
-    // Number of ms it took to decode this frame.
-    private final long decodeTimeMs;
-    // Interval from when the frame finished decoding until this buffer has been created.
-    // Since there is only one texture, this interval depend on the time from when
-    // a frame is decoded and provided to C++ and until that frame is returned to the MediaCodec
-    // so that the texture can be updated with the next decoded frame.
-    private final long frameDelayMs;
-
-    // A DecodedTextureBuffer with zero |textureID| has special meaning and represents a frame
-    // that was dropped.
-    public DecodedTextureBuffer(int textureID, float[] transformMatrix,
-        long presentationTimeStampMs, long timeStampMs, long ntpTimeStampMs, long decodeTimeMs,
-        long frameDelay) {
-      this.textureID = textureID;
-      this.transformMatrix = transformMatrix;
-      this.presentationTimeStampMs = presentationTimeStampMs;
-      this.timeStampMs = timeStampMs;
-      this.ntpTimeStampMs = ntpTimeStampMs;
-      this.decodeTimeMs = decodeTimeMs;
-      this.frameDelayMs = frameDelay;
-    }
-  }
-
-  // Poll based texture listener.
-  private static class TextureListener
-      implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
-    private final SurfaceTextureHelper surfaceTextureHelper;
-    // |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll().
-    private final Object newFrameLock = new Object();
-    // |bufferToRender| is non-null when waiting for transition between addBufferToRender() to
-    // onTextureFrameAvailable().
-    private DecodedOutputBuffer bufferToRender;
-    private DecodedTextureBuffer renderedBuffer;
-
-    public TextureListener(SurfaceTextureHelper surfaceTextureHelper) {
-      this.surfaceTextureHelper = surfaceTextureHelper;
-      surfaceTextureHelper.startListening(this);
-    }
-
-    public void addBufferToRender(DecodedOutputBuffer buffer) {
-      if (bufferToRender != null) {
-        Logging.e(TAG, "Unexpected addBufferToRender() called while waiting for a texture.");
-        throw new IllegalStateException("Waiting for a texture.");
-      }
-      bufferToRender = buffer;
-    }
-
-    public boolean isWaitingForTexture() {
-      synchronized (newFrameLock) {
-        return bufferToRender != null;
-      }
-    }
-
-    // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread.
-    @Override
-    public void onTextureFrameAvailable(
-        int oesTextureId, float[] transformMatrix, long timestampNs) {
-      synchronized (newFrameLock) {
-        if (renderedBuffer != null) {
-          Logging.e(
-              TAG, "Unexpected onTextureFrameAvailable() called while already holding a texture.");
-          throw new IllegalStateException("Already holding a texture.");
-        }
-        // |timestampNs| is always zero on some Android versions.
-        renderedBuffer = new DecodedTextureBuffer(oesTextureId, transformMatrix,
-            bufferToRender.presentationTimeStampMs, bufferToRender.timeStampMs,
-            bufferToRender.ntpTimeStampMs, bufferToRender.decodeTimeMs,
-            SystemClock.elapsedRealtime() - bufferToRender.endDecodeTimeMs);
-        bufferToRender = null;
-        newFrameLock.notifyAll();
-      }
-    }
-
-    // Dequeues and returns a DecodedTextureBuffer if available, or null otherwise.
-    @SuppressWarnings("WaitNotInLoop")
-    public DecodedTextureBuffer dequeueTextureBuffer(int timeoutMs) {
-      synchronized (newFrameLock) {
-        if (renderedBuffer == null && timeoutMs > 0 && isWaitingForTexture()) {
-          try {
-            newFrameLock.wait(timeoutMs);
-          } catch (InterruptedException e) {
-            // Restore the interrupted status by reinterrupting the thread.
-            Thread.currentThread().interrupt();
-          }
-        }
-        DecodedTextureBuffer returnedBuffer = renderedBuffer;
-        renderedBuffer = null;
-        return returnedBuffer;
-      }
-    }
-
-    public void release() {
-      // SurfaceTextureHelper.stopListening() will block until any onTextureFrameAvailable() in
-      // progress is done. Therefore, the call must be outside any synchronized
-      // statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks.
-      surfaceTextureHelper.stopListening();
-      synchronized (newFrameLock) {
-        if (renderedBuffer != null) {
-          surfaceTextureHelper.returnTextureFrame();
-          renderedBuffer = null;
-        }
-      }
-    }
-  }
-
-  // Returns null if no decoded buffer is available, and otherwise a DecodedByteBuffer.
-  // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
-  // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
-  // upon codec error.
-  private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) {
-    checkOnMediaCodecThread();
-    if (decodeStartTimeMs.isEmpty()) {
-      return null;
-    }