aboutsummaryrefslogtreecommitdiff
path: root/webrtc/examples
diff options
context:
space:
mode:
Diffstat (limited to 'webrtc/examples')
-rw-r--r--webrtc/examples/android/media_demo/AndroidManifest.xml29
-rw-r--r--webrtc/examples/android/media_demo/README24
-rw-r--r--webrtc/examples/android/media_demo/build.xml92
-rw-r--r--webrtc/examples/android/media_demo/jni/jni_helpers.cc82
-rw-r--r--webrtc/examples/android/media_demo/jni/jni_helpers.h79
-rw-r--r--webrtc/examples/android/media_demo/jni/on_load.cc48
-rw-r--r--webrtc/examples/android/media_demo/jni/voice_engine_jni.cc423
-rw-r--r--webrtc/examples/android/media_demo/jni/voice_engine_jni.h31
-rw-r--r--webrtc/examples/android/media_demo/project.properties14
-rw-r--r--webrtc/examples/android/media_demo/res/drawable/logo.pngbin3305 -> 0 bytes
-rw-r--r--webrtc/examples/android/media_demo/res/layout/audiomenu.xml80
-rw-r--r--webrtc/examples/android/media_demo/res/layout/dropdownitems.xml17
-rw-r--r--webrtc/examples/android/media_demo/res/layout/mainmenu.xml26
-rw-r--r--webrtc/examples/android/media_demo/res/layout/settingsmenu.xml36
-rw-r--r--webrtc/examples/android/media_demo/res/menu/main_activity_actions.xml5
-rw-r--r--webrtc/examples/android/media_demo/res/values/bools.xml13
-rw-r--r--webrtc/examples/android/media_demo/res/values/integers.xml13
-rw-r--r--webrtc/examples/android/media_demo/res/values/strings.xml41
-rw-r--r--webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/AudioMenuFragment.java156
-rw-r--r--webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/CodecInst.java39
-rw-r--r--webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MainMenuFragment.java123
-rw-r--r--webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngine.java321
-rw-r--r--webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngineObserver.java15
-rw-r--r--webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MenuStateProvider.java15
-rw-r--r--webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/NativeWebRtcContextRegistry.java22
-rw-r--r--webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/RtcpStatistics.java32
-rw-r--r--webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SettingsMenuFragment.java129
-rw-r--r--webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SpinnerAdapter.java49
-rw-r--r--webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VoiceEngine.java117
-rw-r--r--webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/WebRTCDemo.java210
-rw-r--r--webrtc/examples/androidapp/AndroidManifest.xml2
-rw-r--r--webrtc/examples/androidapp/res/values/strings.xml25
-rw-r--r--webrtc/examples/androidapp/res/xml/preferences.xml30
-rw-r--r--webrtc/examples/androidapp/src/org/appspot/apprtc/CallActivity.java25
-rw-r--r--webrtc/examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java39
-rw-r--r--webrtc/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java104
-rw-r--r--webrtc/examples/androidapp/src/org/appspot/apprtc/SettingsActivity.java20
-rw-r--r--webrtc/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java90
-rw-r--r--webrtc/examples/objc/AppRTCDemo/ios/ARDAppDelegate.m2
-rw-r--r--webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.m29
-rw-r--r--webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.h3
-rw-r--r--webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.m36
-rw-r--r--webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m11
-rw-r--r--webrtc/examples/peerconnection/client/conductor.cc16
-rw-r--r--webrtc/examples/peerconnection/client/conductor.h6
-rw-r--r--webrtc/examples/peerconnection/client/defaults.cc9
-rw-r--r--webrtc/examples/peerconnection/client/defaults.h6
-rw-r--r--webrtc/examples/peerconnection/client/flagdefs.h6
-rw-r--r--webrtc/examples/peerconnection/client/linux/main.cc12
-rw-r--r--webrtc/examples/peerconnection/client/linux/main_wnd.cc8
-rw-r--r--webrtc/examples/peerconnection/client/linux/main_wnd.h8
-rw-r--r--webrtc/examples/peerconnection/client/main_wnd.cc3
-rw-r--r--webrtc/examples/peerconnection/client/main_wnd.h9
-rw-r--r--webrtc/examples/peerconnection/client/peer_connection_client.cc4
-rw-r--r--webrtc/examples/peerconnection/client/peer_connection_client.h6
-rw-r--r--webrtc/examples/peerconnection/server/data_socket.h6
-rw-r--r--webrtc/examples/peerconnection/server/peer_channel.cc3
-rw-r--r--webrtc/examples/peerconnection/server/peer_channel.h6
-rw-r--r--webrtc/examples/peerconnection/server/utils.h6
-rw-r--r--webrtc/examples/stunserver/stunserver_main.cc2
60 files changed, 329 insertions, 2484 deletions
diff --git a/webrtc/examples/android/media_demo/AndroidManifest.xml b/webrtc/examples/android/media_demo/AndroidManifest.xml
deleted file mode 100644
index 62bf46076f..0000000000
--- a/webrtc/examples/android/media_demo/AndroidManifest.xml
+++ /dev/null
@@ -1,29 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<manifest xmlns:android="http://schemas.android.com/apk/res/android"
- android:versionCode="1" package="org.webrtc.webrtcdemo" android:versionName="1.07">
- <application android:icon="@drawable/logo"
- android:label="@string/appName"
- android:debuggable="true">
- <activity android:name=".WebRTCDemo"
- android:theme="@android:style/Theme.Holo"
- android:label="@string/appName"
- android:screenOrientation="landscape"
- >
- <intent-filter>
- <action android:name="android.intent.action.MAIN" />
- <category android:name="android.intent.category.LAUNCHER" />
- <action android:name="android.intent.action.HEADSET_PLUG"/>
- </intent-filter>
- </activity>
- </application>
-
- <uses-sdk android:minSdkVersion="14" />
- <uses-permission android:name="android.permission.CAMERA"></uses-permission>
- <uses-feature android:name="android.hardware.camera" />
- <uses-feature android:name="android.hardware.camera.autofocus" />
- <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
- <uses-permission android:name="android.permission.RECORD_AUDIO" />
- <uses-permission android:name="android.permission.INTERNET" />
- <uses-permission android:name="android.permission.WAKE_LOCK" />
- <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
-</manifest> \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/README b/webrtc/examples/android/media_demo/README
deleted file mode 100644
index af71f38f46..0000000000
--- a/webrtc/examples/android/media_demo/README
+++ /dev/null
@@ -1,24 +0,0 @@
-This directory contains a sample app for sending and receiving audio
-on Android. It further lets you enable and disable some call quality
-enhancements such as echo cancellation, noise suppression etc.
-
-Prerequisites:
-- Make sure gclient is checking out tools necessary to target Android: your
- .gclient file should contain a line like:
- target_os = ['android']
- Make sure to re-run gclient sync after adding this to download the tools.
-- Env vars need to be set up to target Android; easiest way to do this is to run
- (from the libjingle trunk directory):
- . ./build/android/envsetup.sh
- Note that this clobbers any previously-set $GYP_DEFINES so it must be done
- before the next item.
-- Set up webrtc-related GYP variables:
- export GYP_DEFINES="$GYP_DEFINES java_home=</path/to/JDK>"
-- Finally, run "gclient runhooks" to generate Android-targeting .ninja files.
-
-Example of building the app:
-cd <path/to/repository>/trunk
-ninja -C out/Debug WebRTCDemo
-
-It can then be installed and run on the device:
-adb install -r out/Debug/WebRTCDemo-debug.apk
diff --git a/webrtc/examples/android/media_demo/build.xml b/webrtc/examples/android/media_demo/build.xml
deleted file mode 100644
index 17734886d9..0000000000
--- a/webrtc/examples/android/media_demo/build.xml
+++ /dev/null
@@ -1,92 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project name="WebRTCDemo" default="help">
-
- <!-- The local.properties file is created and updated by the 'android' tool.
- It contains the path to the SDK. It should *NOT* be checked into
- Version Control Systems. -->
- <property file="local.properties" />
-
- <!-- The ant.properties file can be created by you. It is only edited by the
- 'android' tool to add properties to it.
- This is the place to change some Ant specific build properties.
- Here are some properties you may want to change/update:
-
- source.dir
- The name of the source directory. Default is 'src'.
- out.dir
- The name of the output directory. Default is 'bin'.
-
- For other overridable properties, look at the beginning of the rules
- files in the SDK, at tools/ant/build.xml
-
- Properties related to the SDK location or the project target should
- be updated using the 'android' tool with the 'update' action.
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems.
-
- -->
- <property file="ant.properties" />
-
- <!-- if sdk.dir was not set from one of the property file, then
- get it from the ANDROID_HOME env var.
- This must be done before we load project.properties since
- the proguard config can use sdk.dir -->
- <property environment="env" />
- <condition property="sdk.dir" value="${env.ANDROID_SDK_ROOT}">
- <isset property="env.ANDROID_SDK_ROOT" />
- </condition>
-
- <!-- The project.properties file is created and updated by the 'android'
- tool, as well as ADT.
-
- This contains project specific properties such as project target, and library
- dependencies. Lower level build properties are stored in ant.properties
- (or in .classpath for Eclipse projects).
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems. -->
- <loadproperties srcFile="project.properties" />
-
- <!-- quick check on sdk.dir -->
- <fail
- message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_SDK_ROOT environment variable."
- unless="sdk.dir"
- />
-
- <!--
- Import per project custom build rules if present at the root of the project.
- This is the place to put custom intermediary targets such as:
- -pre-build
- -pre-compile
- -post-compile (This is typically used for code obfuscation.
- Compiled code location: ${out.classes.absolute.dir}
- If this is not done in place, override ${out.dex.input.absolute.dir})
- -post-package
- -post-build
- -pre-clean
- -->
- <import file="custom_rules.xml" optional="true" />
-
- <!-- Import the actual build file.
-
- To customize existing targets, there are two options:
- - Customize only one target:
- - copy/paste the target into this file, *before* the
- <import> task.
- - customize it to your needs.
- - Customize the whole content of build.xml
- - copy/paste the content of the rules files (minus the top node)
- into this file, replacing the <import> task.
- - customize to your needs.
-
- ***********************
- ****** IMPORTANT ******
- ***********************
- In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
- in order to avoid having your file be overridden by tools such as "android update project"
- -->
- <!-- version-tag: 1 -->
- <import file="${sdk.dir}/tools/ant/build.xml" />
-
-</project>
diff --git a/webrtc/examples/android/media_demo/jni/jni_helpers.cc b/webrtc/examples/android/media_demo/jni/jni_helpers.cc
deleted file mode 100644
index b0d1a7425c..0000000000
--- a/webrtc/examples/android/media_demo/jni/jni_helpers.cc
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/examples/android/media_demo/jni/jni_helpers.h"
-
-#include <limits>
-
-#include "unicode/unistr.h"
-
-using icu::UnicodeString;
-
-jmethodID GetMethodID(JNIEnv* jni, jclass c, const std::string& name,
- const char* signature) {
- jmethodID m = jni->GetMethodID(c, name.c_str(), signature);
- CHECK_JNI_EXCEPTION(jni, "error during GetMethodID");
- return m;
-}
-
-jlong jlongFromPointer(void* ptr) {
- CHECK(sizeof(intptr_t) <= sizeof(jlong), "Time to rethink the use of jlongs");
- // Going through intptr_t to be obvious about the definedness of the
- // conversion from pointer to integral type. intptr_t to jlong is a standard
- // widening by the COMPILE_ASSERT above.
- jlong ret = reinterpret_cast<intptr_t>(ptr);
- CHECK(reinterpret_cast<void*>(ret) == ptr,
- "jlong does not convert back to pointer");
- return ret;
-}
-
-// Given a (UTF-16) jstring return a new UTF-8 native string.
-std::string JavaToStdString(JNIEnv* jni, const jstring& j_string) {
- const jchar* jchars = jni->GetStringChars(j_string, NULL);
- CHECK_JNI_EXCEPTION(jni, "Error during GetStringChars");
- UnicodeString ustr(jchars, jni->GetStringLength(j_string));
- CHECK_JNI_EXCEPTION(jni, "Error during GetStringLength");
- jni->ReleaseStringChars(j_string, jchars);
- CHECK_JNI_EXCEPTION(jni, "Error during ReleaseStringChars");
- std::string ret;
- return ustr.toUTF8String(ret);
-}
-
-ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni, const char** classes,
- int size) {
- for (int i = 0; i < size; ++i) {
- LoadClass(jni, classes[i]);
- }
-}
-ClassReferenceHolder::~ClassReferenceHolder() {
- CHECK(classes_.empty(), "Must call FreeReferences() before dtor!");
-}
-
-void ClassReferenceHolder::FreeReferences(JNIEnv* jni) {
- for (std::map<std::string, jclass>::const_iterator it = classes_.begin();
- it != classes_.end(); ++it) {
- jni->DeleteGlobalRef(it->second);
- }
- classes_.clear();
-}
-
-jclass ClassReferenceHolder::GetClass(const std::string& name) {
- std::map<std::string, jclass>::iterator it = classes_.find(name);
- CHECK(it != classes_.end(), "Could not find class");
- return it->second;
-}
-
-void ClassReferenceHolder::LoadClass(JNIEnv* jni, const std::string& name) {
- jclass localRef = jni->FindClass(name.c_str());
- CHECK_JNI_EXCEPTION(jni, "Could not load class");
- CHECK(localRef, name.c_str());
- jclass globalRef = reinterpret_cast<jclass>(jni->NewGlobalRef(localRef));
- CHECK_JNI_EXCEPTION(jni, "error during NewGlobalRef");
- CHECK(globalRef, name.c_str());
- bool inserted = classes_.insert(std::make_pair(name, globalRef)).second;
- CHECK(inserted, "Duplicate class name");
-}
diff --git a/webrtc/examples/android/media_demo/jni/jni_helpers.h b/webrtc/examples/android/media_demo/jni/jni_helpers.h
deleted file mode 100644
index 3d8ff48111..0000000000
--- a/webrtc/examples/android/media_demo/jni/jni_helpers.h
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_JNI_HELPERS_H_
-#define WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_JNI_HELPERS_H_
-
-// TODO(henrike): this file contains duplication with regards to
-// talk/app/webrtc/java/jni/peerconnection_jni.cc. When/if code can be shared
-// between trunk/talk and trunk/webrtc remove the duplication.
-
-#include <android/log.h>
-#include <jni.h>
-
-#include <assert.h>
-#include <map>
-#include <string>
-
-#define TAG "WEBRTC-NATIVE"
-
-// Abort the process if |x| is false, emitting |msg| to logcat.
-#define CHECK(x, msg) \
- if (x) { \
- } else { \
- __android_log_print(ANDROID_LOG_ERROR, TAG, "%s:%d: %s", __FILE__, \
- __LINE__, msg); \
- assert(false); \
- }
-
-// Abort the process if |jni| has a Java exception pending, emitting |msg| to
-// logcat.
-#define CHECK_JNI_EXCEPTION(jni, msg) \
- if (0) { \
- } else { \
- if (jni->ExceptionCheck()) { \
- jni->ExceptionDescribe(); \
- jni->ExceptionClear(); \
- CHECK(0, msg); \
- } \
- }
-
-// JNIEnv-helper methods that CHECK success: no Java exception thrown and found
-// object/class/method/field is non-null.
-jmethodID GetMethodID(JNIEnv* jni, jclass c, const std::string& name,
- const char* signature);
-
-// Return a |jlong| that will automatically convert back to |ptr| when assigned
-// to a |uint64_t|
-jlong jlongFromPointer(void* ptr);
-
-// Given a (UTF-16) jstring return a new UTF-8 native string.
-std::string JavaToStdString(JNIEnv* jni, const jstring& j_string);
-
-// Android's FindClass() is trickier than usual because the app-specific
-// ClassLoader is not consulted when there is no app-specific frame on the
-// stack. Consequently, we only look up classes once in JNI_OnLoad.
-// http://developer.android.com/training/articles/perf-jni.html#faq_FindClass
-class ClassReferenceHolder {
- public:
- ClassReferenceHolder(JNIEnv* jni, const char** classes, int size);
- ~ClassReferenceHolder();
-
- void FreeReferences(JNIEnv* jni);
-
- jclass GetClass(const std::string& name);
-
- private:
- void LoadClass(JNIEnv* jni, const std::string& name);
-
- std::map<std::string, jclass> classes_;
-};
-
-#endif // WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_JNI_HELPERS_H_
diff --git a/webrtc/examples/android/media_demo/jni/on_load.cc b/webrtc/examples/android/media_demo/jni/on_load.cc
deleted file mode 100644
index 5827ee8a30..0000000000
--- a/webrtc/examples/android/media_demo/jni/on_load.cc
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <jni.h>
-
-#include <assert.h>
-
-#include "webrtc/examples/android/media_demo/jni/jni_helpers.h"
-#include "webrtc/examples/android/media_demo/jni/voice_engine_jni.h"
-#include "webrtc/voice_engine/include/voe_base.h"
-
-// Macro for native functions that can be found by way of jni-auto discovery.
-// Note extern "C" is needed for "discovery" of native methods to work.
-#define JOWW(rettype, name) \
- extern "C" rettype JNIEXPORT JNICALL Java_org_webrtc_webrtcdemo_##name
-
-static JavaVM* g_vm = NULL;
-
-extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM* vm, void* reserved) {
- // Only called once.
- CHECK(!g_vm, "OnLoad called more than once");
- g_vm = vm;
- return JNI_VERSION_1_4;
-}
-
-JOWW(void, NativeWebRtcContextRegistry_register)(
- JNIEnv* jni,
- jclass,
- jobject context) {
- webrtc_examples::SetVoeDeviceObjects(g_vm);
- CHECK(webrtc::VoiceEngine::SetAndroidObjects(g_vm, context) == 0,
- "Failed to register android objects to voice engine");
-}
-
-JOWW(void, NativeWebRtcContextRegistry_unRegister)(
- JNIEnv* jni,
- jclass) {
- CHECK(webrtc::VoiceEngine::SetAndroidObjects(NULL, NULL) == 0,
- "Failed to unregister android objects from voice engine");
- webrtc_examples::ClearVoeDeviceObjects();
-}
diff --git a/webrtc/examples/android/media_demo/jni/voice_engine_jni.cc b/webrtc/examples/android/media_demo/jni/voice_engine_jni.cc
deleted file mode 100644
index 79d6cbc4b7..0000000000
--- a/webrtc/examples/android/media_demo/jni/voice_engine_jni.cc
+++ /dev/null
@@ -1,423 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This file contains JNI for the voice engine interfaces.
-// The native functions are found using jni's auto discovery.
-
-#include "webrtc/examples/android/media_demo/jni/voice_engine_jni.h"
-
-#include <map>
-#include <string>
-
-#include "webrtc/base/arraysize.h"
-#include "webrtc/examples/android/media_demo/jni/jni_helpers.h"
-#include "webrtc/modules/utility/interface/helpers_android.h"
-#include "webrtc/test/channel_transport/include/channel_transport.h"
-#include "webrtc/voice_engine/include/voe_audio_processing.h"
-#include "webrtc/voice_engine/include/voe_base.h"
-#include "webrtc/voice_engine/include/voe_codec.h"
-#include "webrtc/voice_engine/include/voe_file.h"
-#include "webrtc/voice_engine/include/voe_hardware.h"
-#include "webrtc/voice_engine/include/voe_network.h"
-#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
-#include "webrtc/voice_engine/include/voe_volume_control.h"
-
-// Macro for native functions that can be found by way of jni-auto discovery.
-// Note extern "C" is needed for "discovery" of native methods to work.
-#define JOWW(rettype, name) \
- extern "C" rettype JNIEXPORT JNICALL Java_org_webrtc_webrtcdemo_##name
-
-namespace {
-
-static JavaVM* g_vm = NULL;
-static ClassReferenceHolder* g_class_reference_holder = NULL;
-
-jclass GetClass(JNIEnv* jni, const char* name) {
- CHECK(g_class_reference_holder, "Class reference holder NULL");
- return g_class_reference_holder->GetClass(name);
-}
-
-static const char* g_classes[] = {"org/webrtc/webrtcdemo/CodecInst"};
-
-template<typename T>
-void ReleaseSubApi(T instance) {
- CHECK(instance->Release() >= 0, "failed to release instance")
-}
-
-class VoiceEngineData {
- public:
- VoiceEngineData()
- : ve(webrtc::VoiceEngine::Create()),
- base(webrtc::VoEBase::GetInterface(ve)),
- codec(webrtc::VoECodec::GetInterface(ve)),
- file(webrtc::VoEFile::GetInterface(ve)),
- netw(webrtc::VoENetwork::GetInterface(ve)),
- apm(webrtc::VoEAudioProcessing::GetInterface(ve)),
- volume(webrtc::VoEVolumeControl::GetInterface(ve)),
- hardware(webrtc::VoEHardware::GetInterface(ve)),
- rtp(webrtc::VoERTP_RTCP::GetInterface(ve)) {
- CHECK(ve != NULL, "Voice engine instance failed to be created");
- CHECK(base != NULL, "Failed to acquire base interface");
- CHECK(codec != NULL, "Failed to acquire codec interface");
- CHECK(file != NULL, "Failed to acquire file interface");
- CHECK(netw != NULL, "Failed to acquire netw interface");
- CHECK(apm != NULL, "Failed to acquire apm interface");
- CHECK(volume != NULL, "Failed to acquire volume interface");
- CHECK(hardware != NULL, "Failed to acquire hardware interface");
- CHECK(rtp != NULL, "Failed to acquire rtp interface");
- }
-
- ~VoiceEngineData() {
- CHECK(channel_transports_.empty(),
- "VoE transports must be deleted before terminating");
- CHECK(base->Terminate() == 0, "VoE failed to terminate");
- ReleaseSubApi(base);
- ReleaseSubApi(codec);
- ReleaseSubApi(file);
- ReleaseSubApi(netw);
- ReleaseSubApi(apm);
- ReleaseSubApi(volume);
- ReleaseSubApi(hardware);
- ReleaseSubApi(rtp);
- webrtc::VoiceEngine* ve_instance = ve;
- CHECK(webrtc::VoiceEngine::Delete(ve_instance), "VoE failed to be deleted");
- }
-
- int CreateChannel() {
- int channel = base->CreateChannel();
- if (channel == -1) {
- return -1;
- }
- CreateTransport(channel);
- return channel;
- }
-
- int DeleteChannel(int channel) {
- if (base->DeleteChannel(channel) != 0) {
- return -1;
- }
- DeleteTransport(channel);
- return 0;
- }
-
- webrtc::test::VoiceChannelTransport* GetTransport(int channel) {
- ChannelTransports::iterator found = channel_transports_.find(channel);
- if (found == channel_transports_.end()) {
- return NULL;
- }
- return found->second;
- }
-
- webrtc::VoiceEngine* const ve;
- webrtc::VoEBase* const base;
- webrtc::VoECodec* const codec;
- webrtc::VoEFile* const file;
- webrtc::VoENetwork* const netw;
- webrtc::VoEAudioProcessing* const apm;
- webrtc::VoEVolumeControl* const volume;
- webrtc::VoEHardware* const hardware;
- webrtc::VoERTP_RTCP* const rtp;
-
- private:
- // Voice engine no longer provides a socket implementation. There is,
- // however, a socket implementation in webrtc::test.
- typedef std::map<int, webrtc::test::VoiceChannelTransport*>
- ChannelTransports;
-
- void CreateTransport(int channel) {
- CHECK(GetTransport(channel) == NULL,
- "Transport already created for VoE channel, inconsistent state");
- channel_transports_[channel] =
- new webrtc::test::VoiceChannelTransport(netw, channel);
- }
- void DeleteTransport(int channel) {
- CHECK(GetTransport(channel) != NULL,
- "VoE channel missing transport, inconsistent state");
- delete channel_transports_[channel];
- channel_transports_.erase(channel);
- }
-
- ChannelTransports channel_transports_;
-};
-
-webrtc::CodecInst* GetCodecInst(JNIEnv* jni, jobject j_codec) {
- jclass j_codec_class = jni->GetObjectClass(j_codec);
- jfieldID native_codec_id =
- jni->GetFieldID(j_codec_class, "nativeCodecInst", "J");
- jlong j_p = jni->GetLongField(j_codec, native_codec_id);
- return reinterpret_cast<webrtc::CodecInst*>(j_p);
-}
-
-} // namespace
-
-namespace webrtc_examples {
-
-void SetVoeDeviceObjects(JavaVM* vm) {
- CHECK(vm, "Trying to register NULL vm");
- g_vm = vm;
- webrtc::AttachThreadScoped ats(g_vm);
- JNIEnv* jni = ats.env();
- g_class_reference_holder = new ClassReferenceHolder(
- jni, g_classes, arraysize(g_classes));
-}
-
-void ClearVoeDeviceObjects() {
- CHECK(g_vm, "Clearing vm without it being set");
- {
- webrtc::AttachThreadScoped ats(g_vm);
- g_class_reference_holder->FreeReferences(ats.env());
- }
- g_vm = NULL;
- delete g_class_reference_holder;
- g_class_reference_holder = NULL;
-}
-
-} // namespace webrtc_examples
-
-VoiceEngineData* GetVoiceEngineData(JNIEnv* jni, jobject j_voe) {
- jclass j_voe_class = jni->GetObjectClass(j_voe);
- jfieldID native_voe_id =
- jni->GetFieldID(j_voe_class, "nativeVoiceEngine", "J");
- jlong j_p = jni->GetLongField(j_voe, native_voe_id);
- return reinterpret_cast<VoiceEngineData*>(j_p);
-}
-
-webrtc::VoiceEngine* GetVoiceEngine(JNIEnv* jni, jobject j_voe) {
- return GetVoiceEngineData(jni, j_voe)->ve;
-}
-
-JOWW(jlong, VoiceEngine_create)(JNIEnv* jni, jclass) {
- VoiceEngineData* voe_data = new VoiceEngineData();
- return jlongFromPointer(voe_data);
-}
-
-JOWW(void, VoiceEngine_dispose)(JNIEnv* jni, jobject j_voe) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- delete voe_data;
-}
-
-JOWW(jint, VoiceEngine_init)(JNIEnv* jni, jobject j_voe) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->base->Init();
-}
-
-JOWW(jint, VoiceEngine_createChannel)(JNIEnv* jni, jobject j_voe) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->CreateChannel();
-}
-
-JOWW(jint, VoiceEngine_deleteChannel)(JNIEnv* jni, jobject j_voe,
- jint channel) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->DeleteChannel(channel);
-}
-
-JOWW(jint, VoiceEngine_setLocalReceiver)(JNIEnv* jni, jobject j_voe,
- jint channel, jint port) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- webrtc::test::VoiceChannelTransport* transport =
- voe_data->GetTransport(channel);
- return transport->SetLocalReceiver(port);
-}
-
-JOWW(jint, VoiceEngine_setSendDestination)(JNIEnv* jni, jobject j_voe,
- jint channel, jint port,
- jstring j_addr) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- std::string addr = JavaToStdString(jni, j_addr);
- webrtc::test::VoiceChannelTransport* transport =
- voe_data->GetTransport(channel);
- return transport->SetSendDestination(addr.c_str(), port);
-}
-
-JOWW(jint, VoiceEngine_startListen)(JNIEnv* jni, jobject j_voe, jint channel) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->base->StartReceive(channel);
-}
-
-JOWW(jint, VoiceEngine_startPlayout)(JNIEnv* jni, jobject j_voe, jint channel) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->base->StartPlayout(channel);
-}
-
-JOWW(jint, VoiceEngine_startSend)(JNIEnv* jni, jobject j_voe, jint channel) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->base->StartSend(channel);
-}
-
-JOWW(jint, VoiceEngine_stopListen)(JNIEnv* jni, jobject j_voe, jint channel) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->base->StartReceive(channel);
-}
-
-JOWW(jint, VoiceEngine_stopPlayout)(JNIEnv* jni, jobject j_voe, jint channel) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->base->StopPlayout(channel);
-}
-
-JOWW(jint, VoiceEngine_stopSend)(JNIEnv* jni, jobject j_voe, jint channel) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->base->StopSend(channel);
-}
-
-JOWW(jint, VoiceEngine_setSpeakerVolume)(JNIEnv* jni, jobject j_voe,
- jint level) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->volume->SetSpeakerVolume(level);
-}
-
-JOWW(jint, VoiceEngine_startPlayingFileLocally)(JNIEnv* jni, jobject j_voe,
- jint channel,
- jstring j_filename,
- jboolean loop) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- std::string filename = JavaToStdString(jni, j_filename);
- return voe_data->file->StartPlayingFileLocally(channel,
- filename.c_str(),
- loop);
-}
-
-JOWW(jint, VoiceEngine_stopPlayingFileLocally)(JNIEnv* jni, jobject j_voe,
- jint channel) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->file->StopPlayingFileLocally(channel);
-}
-
-JOWW(jint, VoiceEngine_startPlayingFileAsMicrophone)(JNIEnv* jni, jobject j_voe,
- jint channel,
- jstring j_filename,
- jboolean loop) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- std::string filename = JavaToStdString(jni, j_filename);
- return voe_data->file->StartPlayingFileAsMicrophone(channel,
- filename.c_str(),
- loop);
-}
-
-JOWW(jint, VoiceEngine_stopPlayingFileAsMicrophone)(JNIEnv* jni, jobject j_voe,
- jint channel) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->file->StopPlayingFileAsMicrophone(channel);
-}
-
-JOWW(jint, VoiceEngine_numOfCodecs)(JNIEnv* jni, jobject j_voe) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->codec->NumOfCodecs();
-}
-
-JOWW(jobject, VoiceEngine_getCodec)(JNIEnv* jni, jobject j_voe, jint index) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- webrtc::CodecInst* codec = new webrtc::CodecInst();
- CHECK(voe_data->codec->GetCodec(index, *codec) == 0,
- "getCodec must be called with valid index");
- jclass j_codec_class = GetClass(jni, "org/webrtc/webrtcdemo/CodecInst");
- jmethodID j_codec_ctor = GetMethodID(jni, j_codec_class, "<init>", "(J)V");
- jobject j_codec =
- jni->NewObject(j_codec_class, j_codec_ctor, jlongFromPointer(codec));
- CHECK_JNI_EXCEPTION(jni, "error during NewObject");
- return j_codec;
-}
-
-JOWW(jint, VoiceEngine_setSendCodec)(JNIEnv* jni, jobject j_voe, jint channel,
- jobject j_codec) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- webrtc::CodecInst* inst = GetCodecInst(jni, j_codec);
- return voe_data->codec->SetSendCodec(channel, *inst);
-}
-
-JOWW(jint, VoiceEngine_setEcStatus)(JNIEnv* jni, jobject j_voe, jboolean enable,
- jint ec_mode) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->apm->SetEcStatus(enable,
- static_cast<webrtc::EcModes>(ec_mode));
-}
-
-JOWW(jint, VoiceEngine_setAecmMode)(JNIEnv* jni, jobject j_voe, jint aecm_mode,
- jboolean cng) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->apm->SetAecmMode(static_cast<webrtc::AecmModes>(aecm_mode),
- cng);
-}
-
-JOWW(jint, VoiceEngine_setAgcStatus)(JNIEnv* jni, jobject j_voe,
- jboolean enable, jint agc_mode) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->apm->SetAgcStatus(enable,
- static_cast<webrtc::AgcModes>(agc_mode));
-}
-
-// Returns the native AgcConfig object associated with the Java object
-// |j_codec|.
-void GetNativeAgcConfig(JNIEnv* jni, jobject j_codec,
- webrtc::AgcConfig* agc_config) {
- jclass j_codec_class = jni->GetObjectClass(j_codec);
- jfieldID dBOv_id = jni->GetFieldID(j_codec_class, "targetLevelDbOv", "I");
- agc_config->targetLeveldBOv = jni->GetIntField(j_codec, dBOv_id);
- jfieldID gain_id =
- jni->GetFieldID(j_codec_class, "digitalCompressionGaindB", "I");
- agc_config->digitalCompressionGaindB = jni->GetIntField(j_codec, gain_id);
- jfieldID limiter_id = jni->GetFieldID(j_codec_class, "limiterEnable", "Z");
- agc_config->limiterEnable = jni->GetBooleanField(j_codec, limiter_id);
-}
-
-JOWW(jint, VoiceEngine_setAgcConfig)(JNIEnv* jni, jobject j_voe,
- jobject j_config) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- webrtc::AgcConfig config;
- GetNativeAgcConfig(jni, j_config, &config);
- return voe_data->apm->SetAgcConfig(config);
-}
-
-JOWW(jint, VoiceEngine_setNsStatus)(JNIEnv* jni, jobject j_voe, jboolean enable,
- jint ns_mode) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->apm->SetNsStatus(enable,
- static_cast<webrtc::NsModes>(ns_mode));
-}
-
-JOWW(jint, VoiceEngine_startDebugRecording)(JNIEnv* jni, jobject j_voe,
- jstring j_filename) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- std::string filename = JavaToStdString(jni, j_filename);
- return voe_data->apm->StartDebugRecording(filename.c_str());
-}
-
-JOWW(jint, VoiceEngine_stopDebugRecording)(JNIEnv* jni, jobject j_voe) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->apm->StopDebugRecording();
-}
-
-JOWW(void, CodecInst_dispose)(JNIEnv* jni, jobject j_codec) {
- delete GetCodecInst(jni, j_codec);
-}
-
-JOWW(jint, CodecInst_plType)(JNIEnv* jni, jobject j_codec) {
- return GetCodecInst(jni, j_codec)->pltype;
-}
-
-JOWW(jstring, CodecInst_name)(JNIEnv* jni, jobject j_codec) {
- return jni->NewStringUTF(GetCodecInst(jni, j_codec)->plname);
-}
-
-JOWW(jint, CodecInst_plFrequency)(JNIEnv* jni, jobject j_codec) {
- return GetCodecInst(jni, j_codec)->plfreq;
-}
-
-JOWW(jint, CodecInst_pacSize)(JNIEnv* jni, jobject j_codec) {
- return GetCodecInst(jni, j_codec)->pacsize;
-}
-
-JOWW(jint, CodecInst_channels)(JNIEnv* jni, jobject j_codec) {
- return GetCodecInst(jni, j_codec)->channels;
-}
-
-JOWW(jint, CodecInst_rate)(JNIEnv* jni, jobject j_codec) {
- return GetCodecInst(jni, j_codec)->rate;
-}
diff --git a/webrtc/examples/android/media_demo/jni/voice_engine_jni.h b/webrtc/examples/android/media_demo/jni/voice_engine_jni.h
deleted file mode 100644
index 57ef507653..0000000000
--- a/webrtc/examples/android/media_demo/jni/voice_engine_jni.h
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_VOICE_ENGINE_H_
-#define WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_VOICE_ENGINE_H_
-
-#include <jni.h>
-
-namespace webrtc {
-
-class VoiceEngine;
-
-} // namespace webrtc
-
-namespace webrtc_examples {
-
-void SetVoeDeviceObjects(JavaVM* vm);
-void ClearVoeDeviceObjects();
-
-} // namespace webrtc_examples
-
-webrtc::VoiceEngine* GetVoiceEngine(JNIEnv* jni, jobject j_voe);
-
-#endif // WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_VOICE_ENGINE_H_
diff --git a/webrtc/examples/android/media_demo/project.properties b/webrtc/examples/android/media_demo/project.properties
deleted file mode 100644
index 69eb2d039b..0000000000
--- a/webrtc/examples/android/media_demo/project.properties
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file is automatically generated by Android Tools.
-# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
-#
-# This file must be checked in Version Control Systems.
-#
-# To customize properties used by the Ant build system use,
-# "ant.properties", and override values to adapt the script to your
-# project structure.
-
-# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
-#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
-
-# Project target.
-target=android-23
diff --git a/webrtc/examples/android/media_demo/res/drawable/logo.png b/webrtc/examples/android/media_demo/res/drawable/logo.png
deleted file mode 100644
index 1ff07d1102..0000000000
--- a/webrtc/examples/android/media_demo/res/drawable/logo.png
+++ /dev/null
Binary files differ
diff --git a/webrtc/examples/android/media_demo/res/layout/audiomenu.xml b/webrtc/examples/android/media_demo/res/layout/audiomenu.xml
deleted file mode 100644
index f35547062a..0000000000
--- a/webrtc/examples/android/media_demo/res/layout/audiomenu.xml
+++ /dev/null
@@ -1,80 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
- android:layout_width="fill_parent"
- android:layout_height="fill_parent"
- android:orientation="vertical">
- <TextView android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:textStyle="bold"
- android:textSize="24dip"
- android:text="Audio Settings">
- </TextView>
- <TextView android:layout_height="wrap_content"
- android:layout_gravity="bottom"
- android:layout_width="wrap_content"
- android:text="@string/codecType">
- </TextView>
- <Spinner android:id="@+id/spAudioCodecType"
- android:layout_height="wrap_content"
- android:layout_width="fill_parent">
- </Spinner>
- <LinearLayout android:layout_height="wrap_content"
- android:layout_width="fill_parent">
- <TextView android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:text="@string/aTxPort">
- </TextView>
- <EditText android:id="@+id/etATxPort"
- android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:imeOptions="actionDone"
- android:inputType="number">
- </EditText>
- <TextView android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:text="@string/aRxPort">
- </TextView>
- <EditText android:id="@+id/etARxPort"
- android:layout_height="wrap_content"
- android:layout_width="wrap_content"
- android:imeOptions="actionDone"
- android:inputType="number">
- </EditText>
- </LinearLayout>
- <LinearLayout android:layout_height="wrap_content"
- android:layout_width="fill_parent">
- <CheckBox android:id="@+id/cbAecm"
- android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:text="@string/aecm">
- </CheckBox>
- <CheckBox android:id="@+id/cbNoiseSuppression"
- android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:text="@string/noiseSuppression">
- </CheckBox>
- <CheckBox android:id="@+id/cbAutoGainControl"
- android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:text="@string/autoGainControl">
- </CheckBox>
- </LinearLayout>
- <LinearLayout android:layout_height="wrap_content"
- android:layout_width="fill_parent">
- <CheckBox android:id="@+id/cbSpeaker"
- android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:text="@string/speaker">
- </CheckBox>
- <CheckBox android:id="@+id/cbDebugRecording"
- android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:text="@string/debugRecording">
- </CheckBox>
- <CheckBox android:id="@+id/cbAudioRTPDump"
- android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:text="@string/rtpDump">
- </CheckBox>
- </LinearLayout>
-</LinearLayout> \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/layout/dropdownitems.xml b/webrtc/examples/android/media_demo/res/layout/dropdownitems.xml
deleted file mode 100644
index 1014612000..0000000000
--- a/webrtc/examples/android/media_demo/res/layout/dropdownitems.xml
+++ /dev/null
@@ -1,17 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
- android:layout_width="fill_parent"
- android:layout_height="wrap_content"
- android:orientation="vertical"
- android:padding="3dip">
- <TextView android:id="@+id/spinner_row"
- android:layout_toRightOf="@+id/image"
- android:padding="3dip"
- android:layout_marginTop="2dip"
- android:textColor="#FFF"
- android:textStyle="bold"
- android:text="description"
- android:layout_marginLeft="5dip"
- android:layout_width="wrap_content"
- android:layout_height="wrap_content"/>
-</RelativeLayout> \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/layout/mainmenu.xml b/webrtc/examples/android/media_demo/res/layout/mainmenu.xml
deleted file mode 100644
index 89f5399df7..0000000000
--- a/webrtc/examples/android/media_demo/res/layout/mainmenu.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<LinearLayout
- xmlns:android="http://schemas.android.com/apk/res/android"
- android:orientation="horizontal"
- android:layout_width="fill_parent"
- android:layout_height="fill_parent">
- <LinearLayout
- android:orientation="vertical"
- android:layout_width="120dip"
- android:layout_height="fill_parent">
- <TextView android:id="@+id/tvStats"
- android:layout_width="fill_parent"
- android:layout_height="60dip"
- android:textSize="6sp"
- android:text=""/>
- <Button android:id="@+id/btStats"
- android:layout_width="fill_parent"
- android:layout_height="wrap_content"
- android:layout_gravity="bottom"
- android:text="@string/stats"/>
- <Button android:id="@+id/btStartStopCall"
- android:layout_width="fill_parent"
- android:layout_height="wrap_content"
- android:layout_gravity="bottom"/>
- </LinearLayout>
-</LinearLayout> \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/layout/settingsmenu.xml b/webrtc/examples/android/media_demo/res/layout/settingsmenu.xml
deleted file mode 100644
index 4fba57eadc..0000000000
--- a/webrtc/examples/android/media_demo/res/layout/settingsmenu.xml
+++ /dev/null
@@ -1,36 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
- android:layout_width="fill_parent"
- android:layout_height="fill_parent"
- android:layout_gravity="right"
- android:orientation="vertical">
- <TextView android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:textStyle="bold"
- android:textSize="24dip"
- android:text="@string/gSettings">
- </TextView>
- <LinearLayout android:orientation="horizontal"
- android:layout_height="wrap_content"
- android:layout_width="fill_parent">
- <CheckBox android:id="@+id/cbAudio"
- android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:text="@string/enableAudio">
- </CheckBox>
- <CheckBox android:id="@+id/cbLoopback"
- android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:text="@string/loopback">
- </CheckBox>
- </LinearLayout>
- <TextView android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:text="@string/remoteIp">
- </TextView>
- <EditText android:id="@+id/etRemoteIp"
- android:layout_height="wrap_content"
- android:layout_width="fill_parent"
- android:imeOptions="actionDone">
- </EditText>
-</LinearLayout> \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/menu/main_activity_actions.xml b/webrtc/examples/android/media_demo/res/menu/main_activity_actions.xml
deleted file mode 100644
index a4921a6bbe..0000000000
--- a/webrtc/examples/android/media_demo/res/menu/main_activity_actions.xml
+++ /dev/null
@@ -1,5 +0,0 @@
-<menu xmlns:android="http://schemas.android.com/apk/res/android" >
- <item android:id="@+id/action_exit"
- android:icon="@drawable/logo"
- android:title="Exit"/>
-</menu> \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/values/bools.xml b/webrtc/examples/android/media_demo/res/values/bools.xml
deleted file mode 100644
index d4f3fc0e95..0000000000
--- a/webrtc/examples/android/media_demo/res/values/bools.xml
+++ /dev/null
@@ -1,13 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<resources>
- <bool name="apm_debug_enabled_default">false</bool>
- <bool name="audio_enabled_default">true</bool>
- <bool name="loopback_enabled_default">true</bool>
- <bool name="nack_enabled_default">true</bool>
- <bool name="opengl_enabled_default">true</bool>
- <bool name="speaker_enabled_default">false</bool>
- <bool name="stats_enabled_default">true</bool>
- <bool name="trace_enabled_default">true</bool>
- <bool name="video_receive_enabled_default">true</bool>
- <bool name="video_send_enabled_default">true</bool>
-</resources> \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/values/integers.xml b/webrtc/examples/android/media_demo/res/values/integers.xml
deleted file mode 100644
index 562643b5f2..0000000000
--- a/webrtc/examples/android/media_demo/res/values/integers.xml
+++ /dev/null
@@ -1,13 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<resources>
- <integer name="aRxPortDefault">11113</integer>
- <integer name="aTxPortDefault">11113</integer>
- <integer name="openGl">0</integer>
- <integer name="surfaceView">1</integer>
- <integer name="mediaCodec">2</integer>
- <integer name="defaultView">0</integer>
- <integer name="call_restart_periodicity_ms">0</integer>
- <integer name="video_codec_default">0</integer>
- <integer name="vRxPortDefault">11111</integer>
- <integer name="vTxPortDefault">11111</integer>
-</resources> \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/values/strings.xml b/webrtc/examples/android/media_demo/res/values/strings.xml
deleted file mode 100644
index 297d289b0c..0000000000
--- a/webrtc/examples/android/media_demo/res/values/strings.xml
+++ /dev/null
@@ -1,41 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<resources>
- <string name="aecm">AECM</string>
- <string name="appName">WebRTC Engine Demo</string>
- <string name="aRxPort">Audio Rx Port</string>
- <string name="aTxPort">Audio Tx Port</string>
- <string name="autoGainControl">AGC</string>
- <string name="backCamera">SwitchToBack</string>
- <string name="codecSize">Codec Size</string>
- <string name="codecType">Codec Type</string>
- <string name="debugRecording">APMRecord</string>
- <string name="demoTitle">Video Engine Android Demo</string>
- <string name="enableVideoReceive">Video Receive</string>
- <string name="enableVideoSend">Video Send</string>
- <string name="enableAudio">Audio</string>
- <string name="error">Error</string>
- <string name="errorCamera">Camera Error</string>
- <string name="exit">Exit</string>
- <string name="frontCamera">SwitchToFront</string>
- <string name="gSettings">Global Settings</string>
- <string name="loopback">Loopback</string>
- <string name="loopbackIp">127.0.0.1</string>
- <string name="nack">NACK</string>
- <string name="noiseSuppression">NS</string>
- <string name="remoteIp">Remote IP address</string>
- <string name="rtpDump">rtpdump</string>
- <string name="speaker">Speaker</string>
- <string name="startBoth">Start Both</string>
- <string name="startCall">StartCall</string>
- <string name="startListen">Start Listen</string>
- <string name="startSend">Start Send</string>
- <string name="stats">Stats</string>
- <string name="statsOn">Stats on</string>
- <string name="statsOff">Stats off</string>
- <string name="stopCall">StopCall</string>
- <string name="surfaceView">SurfaceView</string>
- <string name="tag">WEBRTC</string>
- <string name="vRxPort">Video Rx Port</string>
- <string name="vSettings">Video Settings</string>
- <string name="vTxPort">Video Tx Port</string>
-</resources>
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/AudioMenuFragment.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/AudioMenuFragment.java
deleted file mode 100644
index 94e23c2465..0000000000
--- a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/AudioMenuFragment.java
+++ /dev/null
@@ -1,156 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.webrtcdemo;
-
-import android.app.Activity;
-import android.app.Fragment;
-import android.os.Bundle;
-import android.util.Log;
-import android.view.LayoutInflater;
-import android.view.View;
-import android.view.ViewGroup;
-import android.widget.AdapterView;
-import android.widget.AdapterView.OnItemSelectedListener;
-import android.widget.CheckBox;
-import android.widget.EditText;
-import android.widget.Spinner;
-import android.widget.TextView;
-import java.lang.Integer;
-
-public class AudioMenuFragment extends Fragment {
-
- private String TAG;
- private MenuStateProvider stateProvider;
-
- @Override
- public View onCreateView(LayoutInflater inflater, ViewGroup container,
- Bundle savedInstanceState) {
- View v = inflater.inflate(R.layout.audiomenu, container, false);
-
- TAG = getResources().getString(R.string.tag);
-
- String[] audioCodecsStrings = getEngine().audioCodecsAsString();
- Spinner spAudioCodecType = (Spinner) v.findViewById(R.id.spAudioCodecType);
- spAudioCodecType.setAdapter(new SpinnerAdapter(getActivity(),
- R.layout.dropdownitems,
- audioCodecsStrings,
- inflater));
- spAudioCodecType.setSelection(getEngine().audioCodecIndex());
- spAudioCodecType.setOnItemSelectedListener(new OnItemSelectedListener() {
- public void onItemSelected(AdapterView<?> adapterView, View view,
- int position, long id) {
- getEngine().setAudioCodec(position);
- }
- public void onNothingSelected(AdapterView<?> arg0) {
- Log.d(TAG, "No setting selected");
- }
- });
-
- EditText etATxPort = (EditText) v.findViewById(R.id.etATxPort);
- etATxPort.setText(Integer.toString(getEngine().audioTxPort()));
- etATxPort.setOnClickListener(new View.OnClickListener() {
- public void onClick(View editText) {
- EditText etATxPort = (EditText) editText;
- getEngine()
- .setAudioTxPort(Integer.parseInt(etATxPort.getText().toString()));
- etATxPort.setText(Integer.toString(getEngine().audioTxPort()));
- }
- });
- EditText etARxPort = (EditText) v.findViewById(R.id.etARxPort);
- etARxPort.setText(Integer.toString(getEngine().audioRxPort()));
- etARxPort.setOnClickListener(new View.OnClickListener() {
- public void onClick(View editText) {
- EditText etARxPort = (EditText) editText;
- getEngine()
- .setAudioRxPort(Integer.parseInt(etARxPort.getText().toString()));
- etARxPort.setText(Integer.toString(getEngine().audioRxPort()));
-
- }
- });
-
- CheckBox cbEnableAecm = (CheckBox) v.findViewById(R.id.cbAecm);
- cbEnableAecm.setChecked(getEngine().aecmEnabled());
- cbEnableAecm.setOnClickListener(new View.OnClickListener() {
- public void onClick(View checkBox) {
- CheckBox cbEnableAecm = (CheckBox) checkBox;
- getEngine().setEc(cbEnableAecm.isChecked());
- cbEnableAecm.setChecked(getEngine().aecmEnabled());
- }
- });
- CheckBox cbEnableNs = (CheckBox) v.findViewById(R.id.cbNoiseSuppression);
- cbEnableNs.setChecked(getEngine().nsEnabled());
- cbEnableNs.setOnClickListener(new View.OnClickListener() {
- public void onClick(View checkBox) {
- CheckBox cbEnableNs = (CheckBox) checkBox;
- getEngine().setNs(cbEnableNs.isChecked());
- cbEnableNs.setChecked(getEngine().nsEnabled());
- }
- });
- CheckBox cbEnableAgc = (CheckBox) v.findViewById(R.id.cbAutoGainControl);
- cbEnableAgc.setChecked(getEngine().agcEnabled());
- cbEnableAgc.setOnClickListener(new View.OnClickListener() {
- public void onClick(View checkBox) {
- CheckBox cbEnableAgc = (CheckBox) checkBox;
- getEngine().setAgc(cbEnableAgc.isChecked());
- cbEnableAgc.setChecked(getEngine().agcEnabled());
- }
- });
- CheckBox cbEnableSpeaker = (CheckBox) v.findViewById(R.id.cbSpeaker);
- cbEnableSpeaker.setChecked(getEngine().speakerEnabled());
- cbEnableSpeaker.setOnClickListener(new View.OnClickListener() {
- public void onClick(View checkBox) {
- CheckBox cbEnableSpeaker = (CheckBox) checkBox;
- getEngine().setSpeaker(cbEnableSpeaker.isChecked());
- cbEnableSpeaker.setChecked(getEngine().speakerEnabled());
- }
- });
- CheckBox cbEnableDebugAPM =
- (CheckBox) v.findViewById(R.id.cbDebugRecording);
- cbEnableDebugAPM.setChecked(getEngine().apmRecord());
- cbEnableDebugAPM.setOnClickListener(new View.OnClickListener() {
- public void onClick(View checkBox) {
- CheckBox cbEnableDebugAPM = (CheckBox) checkBox;
- getEngine().setDebuging(cbEnableDebugAPM.isChecked());
- cbEnableDebugAPM.setChecked(getEngine().apmRecord());
- }
- });
- CheckBox cbEnableAudioRTPDump =
- (CheckBox) v.findViewById(R.id.cbAudioRTPDump);
- cbEnableAudioRTPDump.setChecked(getEngine().audioRtpDump());
- cbEnableAudioRTPDump.setOnClickListener(new View.OnClickListener() {
- public void onClick(View checkBox) {
- CheckBox cbEnableAudioRTPDump = (CheckBox) checkBox;
- getEngine().setIncomingVoeRtpDump(cbEnableAudioRTPDump.isChecked());
- cbEnableAudioRTPDump.setChecked(getEngine().audioRtpDump());
- }
- });
- return v;
- }
-
- @Override
- public void onAttach(Activity activity) {
- super.onAttach(activity);
-
- // This makes sure that the container activity has implemented
- // the callback interface. If not, it throws an exception.
- try {
- stateProvider = (MenuStateProvider) activity;
- } catch (ClassCastException e) {
- throw new ClassCastException(activity +
- " must implement MenuStateProvider");
- }
- }
-
- private MediaEngine getEngine() {
- return stateProvider.getEngine();
- }
-
-} \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/CodecInst.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/CodecInst.java
deleted file mode 100644
index 133d63926b..0000000000
--- a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/CodecInst.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.webrtcdemo;
-
-public class CodecInst {
- private final long nativeCodecInst;
-
- // CodecInst can only be created from the native layer.
- private CodecInst(long nativeCodecInst) {
- this.nativeCodecInst = nativeCodecInst;
- }
-
- public String toString() {
- return name() + " " +
- "PlType: " + plType() + " " +
- "PlFreq: " + plFrequency() + " " +
- "Size: " + pacSize() + " " +
- "Channels: " + channels() + " " +
- "Rate: " + rate();
- }
-
- // Dispose must be called before all references to CodecInst are lost as it
- // will free memory allocated in the native layer.
- public native void dispose();
- public native int plType();
- public native String name();
- public native int plFrequency();
- public native int pacSize();
- public native int channels();
- public native int rate();
-} \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MainMenuFragment.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MainMenuFragment.java
deleted file mode 100644
index 793d784043..0000000000
--- a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MainMenuFragment.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.webrtcdemo;
-
-import android.app.Activity;
-import android.app.Fragment;
-import android.os.Bundle;
-import android.util.Log;
-import android.view.LayoutInflater;
-import android.view.SurfaceView;
-import android.view.View;
-import android.view.ViewGroup;
-import android.widget.Button;
-import android.widget.LinearLayout;
-import android.widget.TextView;
-
-public class MainMenuFragment extends Fragment implements MediaEngineObserver {
-
- private String TAG;
- private MenuStateProvider stateProvider;
-
- private Button btStartStopCall;
- private TextView tvStats;
-
- @Override
- public View onCreateView(LayoutInflater inflater, ViewGroup container,
- Bundle savedInstanceState) {
- View v = inflater.inflate(R.layout.mainmenu, container, false);
-
- TAG = getResources().getString(R.string.tag);
-
- Button btStats = (Button) v.findViewById(R.id.btStats);
- boolean stats = getResources().getBoolean(R.bool.stats_enabled_default);
- enableStats(btStats, stats);
- btStats.setOnClickListener(new View.OnClickListener() {
- public void onClick(View button) {
- boolean turnOnStats = ((Button) button).getText().equals(
- getResources().getString(R.string.statsOn));
- enableStats((Button) button, turnOnStats);
- }
- });
- tvStats = (TextView) v.findViewById(R.id.tvStats);
-
- btStartStopCall = (Button) v.findViewById(R.id.btStartStopCall);
- btStartStopCall.setText(getEngine().isRunning() ?
- R.string.stopCall :
- R.string.startCall);
- btStartStopCall.setOnClickListener(new View.OnClickListener() {
- public void onClick(View button) {
- toggleStart();
- }
- });
- return v;
- }
-
- @Override
- public void onAttach(Activity activity) {
- super.onAttach(activity);
-
- // This makes sure that the container activity has implemented
- // the callback interface. If not, it throws an exception.
- try {
- stateProvider = (MenuStateProvider) activity;
- } catch (ClassCastException e) {
- throw new ClassCastException(activity +
- " must implement MenuStateProvider");
- }
- }
-
- // tvStats need to be updated on the UI thread.
- public void newStats(final String stats) {
- getActivity().runOnUiThread(new Runnable() {
- public void run() {
- tvStats.setText(stats);
- }
- });
- }
-
- private MediaEngine getEngine() {
- return stateProvider.getEngine();
- }
-
- private void enableStats(Button btStats, boolean enable) {
- if (enable) {
- getEngine().setObserver(this);
- } else {
- getEngine().setObserver(null);
- // Clear old stats text by posting empty stats.
- newStats("");
- }
- // If stats was true it was just turned on. This means that
- // clicking the button again should turn off stats.
- btStats.setText(enable ? R.string.statsOff : R.string.statsOn);
- }
-
-
- public void toggleStart() {
- if (getEngine().isRunning()) {
- stopAll();
- } else {
- startCall();
- }
- btStartStopCall.setText(getEngine().isRunning() ?
- R.string.stopCall :
- R.string.startCall);
- }
-
- public void stopAll() {
- getEngine().stop();
- }
-
- private void startCall() {
- getEngine().start();
- }
-} \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngine.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngine.java
deleted file mode 100644
index a7036914ff..0000000000
--- a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngine.java
+++ /dev/null
@@ -1,321 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.webrtcdemo;
-
-import android.app.AlertDialog;
-import android.content.BroadcastReceiver;
-import android.content.Context;
-import android.content.DialogInterface;
-import android.content.Intent;
-import android.content.IntentFilter;
-import android.media.AudioManager;
-import android.os.Environment;
-import android.util.Log;
-import android.view.OrientationEventListener;
-import java.io.File;
-
-public class MediaEngine {
- private static final String LOG_DIR = "webrtc";
-
- // Checks for and communicate failures to user (logcat and popup).
- private void check(boolean value, String message) {
- if (value) {
- return;
- }
- Log.e("WEBRTC-CHECK", message);
- AlertDialog alertDialog = new AlertDialog.Builder(context).create();
- alertDialog.setTitle("WebRTC Error");
- alertDialog.setMessage(message);
- alertDialog.setButton(DialogInterface.BUTTON_POSITIVE,
- "OK",
- new DialogInterface.OnClickListener() {
- public void onClick(DialogInterface dialog, int which) {
- dialog.dismiss();
- return;
- }
- }
- );
- alertDialog.show();
- }
-
-
- // Shared Audio/Video members.
- private final Context context;
- private String remoteIp;
- private boolean enableTrace;
-
- // Audio
- private VoiceEngine voe;
- private int audioChannel;
- private boolean audioEnabled;
- private boolean voeRunning;
- private int audioCodecIndex;
- private int audioTxPort;
- private int audioRxPort;
-
- private boolean speakerEnabled;
- private boolean headsetPluggedIn;
- private boolean enableAgc;
- private boolean enableNs;
- private boolean enableAecm;
-
- private BroadcastReceiver headsetListener;
-
- private boolean audioRtpDump;
- private boolean apmRecord;
-
- private int inFps;
- private int inKbps;
- private int outFps;
- private int outKbps;
- private int inWidth;
- private int inHeight;
-
- public MediaEngine(Context context) {
- this.context = context;
- voe = new VoiceEngine();
- check(voe.init() == 0, "Failed voe Init");
- audioChannel = voe.createChannel();
- check(audioChannel >= 0, "Failed voe CreateChannel");
- check(audioChannel >= 0, "Failed voe CreateChannel");
-
- check(voe.setAecmMode(VoiceEngine.AecmModes.SPEAKERPHONE, false) == 0,
- "VoE set Aecm speakerphone mode failed");
-
- // Set audio mode to communication
- AudioManager audioManager =
- ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE));
- audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
- // Listen to headset being plugged in/out.
- IntentFilter receiverFilter = new IntentFilter(Intent.ACTION_HEADSET_PLUG);
- headsetListener = new BroadcastReceiver() {
- @Override
- public void onReceive(Context context, Intent intent) {
- if (intent.getAction().compareTo(Intent.ACTION_HEADSET_PLUG) == 0) {
- headsetPluggedIn = intent.getIntExtra("state", 0) == 1;
- updateAudioOutput();
- }
- }
- };
- context.registerReceiver(headsetListener, receiverFilter);
- }
-
- public void dispose() {
- check(!voeRunning && !voeRunning, "Engines must be stopped before dispose");
- context.unregisterReceiver(headsetListener);
- check(voe.deleteChannel(audioChannel) == 0, "VoE delete channel failed");
- voe.dispose();
- }
-
- public void start() {
- if (audioEnabled) {
- startVoE();
- }
- }
-
- public void stop() {
- stopVoe();
- }
-
- public boolean isRunning() {
- return voeRunning;
- }
-
- public void setRemoteIp(String remoteIp) {
- this.remoteIp = remoteIp;
- UpdateSendDestination();
- }
-
- public String remoteIp() { return remoteIp; }
-
- private String getDebugDirectory() {
- // Should create a folder in /scard/|LOG_DIR|
- return Environment.getExternalStorageDirectory().toString() + "/" +
- LOG_DIR;
- }
-
- private boolean createDebugDirectory() {
- File webrtc_dir = new File(getDebugDirectory());
- if (!webrtc_dir.exists()) {
- return webrtc_dir.mkdir();
- }
- return webrtc_dir.isDirectory();
- }
-
- public void startVoE() {
- check(!voeRunning, "VoE already started");
- check(voe.startListen(audioChannel) == 0, "Failed StartListen");
- check(voe.startPlayout(audioChannel) == 0, "VoE start playout failed");
- check(voe.startSend(audioChannel) == 0, "VoE start send failed");
- voeRunning = true;
- }
-
- private void stopVoe() {
- check(voeRunning, "VoE not started");
- check(voe.stopSend(audioChannel) == 0, "VoE stop send failed");
- check(voe.stopPlayout(audioChannel) == 0, "VoE stop playout failed");
- check(voe.stopListen(audioChannel) == 0, "VoE stop listen failed");
- voeRunning = false;
- }
-
- public void setAudio(boolean audioEnabled) {
- this.audioEnabled = audioEnabled;
- }
-
- public boolean audioEnabled() { return audioEnabled; }
-
- public int audioCodecIndex() { return audioCodecIndex; }
-
- public void setAudioCodec(int codecNumber) {
- audioCodecIndex = codecNumber;
- CodecInst codec = voe.getCodec(codecNumber);
- check(voe.setSendCodec(audioChannel, codec) == 0, "Failed setSendCodec");
- codec.dispose();
- }
-
- public String[] audioCodecsAsString() {
- String[] retVal = new String[voe.numOfCodecs()];
- for (int i = 0; i < voe.numOfCodecs(); ++i) {
- CodecInst codec = voe.getCodec(i);
- retVal[i] = codec.toString();
- codec.dispose();
- }
- return retVal;
- }
-
- private CodecInst[] defaultAudioCodecs() {
- CodecInst[] retVal = new CodecInst[voe.numOfCodecs()];
- for (int i = 0; i < voe.numOfCodecs(); ++i) {
- retVal[i] = voe.getCodec(i);
- }
- return retVal;
- }
-
- public int getIsacIndex() {
- CodecInst[] codecs = defaultAudioCodecs();
- for (int i = 0; i < codecs.length; ++i) {
- if (codecs[i].name().contains("ISAC")) {
- return i;
- }
- }
- return 0;
- }
-
- public void setAudioTxPort(int audioTxPort) {
- this.audioTxPort = audioTxPort;
- UpdateSendDestination();
- }
-
- public int audioTxPort() { return audioTxPort; }
-
- public void setAudioRxPort(int audioRxPort) {
- check(voe.setLocalReceiver(audioChannel, audioRxPort) == 0,
- "Failed setLocalReceiver");
- this.audioRxPort = audioRxPort;
- }
-
- public int audioRxPort() { return audioRxPort; }
-
- public boolean agcEnabled() { return enableAgc; }
-
- public void setAgc(boolean enable) {
- enableAgc = enable;
- VoiceEngine.AgcConfig agc_config =
- new VoiceEngine.AgcConfig(3, 9, true);
- check(voe.setAgcConfig(agc_config) == 0, "VoE set AGC Config failed");
- check(voe.setAgcStatus(enableAgc, VoiceEngine.AgcModes.FIXED_DIGITAL) == 0,
- "VoE set AGC Status failed");
- }
-
- public boolean nsEnabled() { return enableNs; }
-
- public void setNs(boolean enable) {
- enableNs = enable;
- check(voe.setNsStatus(enableNs,
- VoiceEngine.NsModes.MODERATE_SUPPRESSION) == 0,
- "VoE set NS Status failed");
- }
-
- public boolean aecmEnabled() { return enableAecm; }
-
- public void setEc(boolean enable) {
- enableAecm = enable;
- check(voe.setEcStatus(enable, VoiceEngine.EcModes.AECM) == 0,
- "voe setEcStatus");
- }
-
- public boolean speakerEnabled() {
- return speakerEnabled;
- }
-
- public void setSpeaker(boolean enable) {
- speakerEnabled = enable;
- updateAudioOutput();
- }
-
- // Debug helpers.
- public boolean apmRecord() { return apmRecord; }
-
- public boolean audioRtpDump() { return audioRtpDump; }
-
- public void setDebuging(boolean enable) {
- apmRecord = enable;
- if (!enable) {
- check(voe.stopDebugRecording() == 0, "Failed stopping debug");
- return;
- }
- if (!createDebugDirectory()) {
- check(false, "Unable to create debug directory.");
- return;
- }
- String debugDirectory = getDebugDirectory();
- check(voe.startDebugRecording(debugDirectory + String.format("/apm_%d.dat",
- System.currentTimeMillis())) == 0,
- "Failed starting debug");
- }
-
- public void setIncomingVoeRtpDump(boolean enable) {
- audioRtpDump = enable;
- if (!enable) {
- check(voe.stopRtpDump(audioChannel,
- VoiceEngine.RtpDirections.INCOMING) == 0,
- "voe stopping rtp dump");
- return;
- }
- String debugDirectory = getDebugDirectory();
- check(voe.startRtpDump(audioChannel, debugDirectory +
- String.format("/voe_%d.rtp", System.currentTimeMillis()),
- VoiceEngine.RtpDirections.INCOMING) == 0,
- "voe starting rtp dump");
- }
-
- private void updateAudioOutput() {
- boolean useSpeaker = !headsetPluggedIn && speakerEnabled;
- AudioManager audioManager =
- ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE));
- audioManager.setSpeakerphoneOn(useSpeaker);
- }
-
- private void UpdateSendDestination() {
- if (remoteIp == null) {
- return;
- }
- if (audioTxPort != 0) {
- check(voe.setSendDestination(audioChannel, audioTxPort,
- remoteIp) == 0, "VoE set send destination failed");
- }
- }
-
- MediaEngineObserver observer;
- public void setObserver(MediaEngineObserver observer) {
- this.observer = observer;
- }
-}
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngineObserver.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngineObserver.java
deleted file mode 100644
index 3ea91b5e92..0000000000
--- a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngineObserver.java
+++ /dev/null
@@ -1,15 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.webrtcdemo;
-
-public interface MediaEngineObserver {
- void newStats(String stats);
-} \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MenuStateProvider.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MenuStateProvider.java
deleted file mode 100644
index 08cb508667..0000000000
--- a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MenuStateProvider.java
+++ /dev/null
@@ -1,15 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.webrtcdemo;
-
-public interface MenuStateProvider {
- public MediaEngine getEngine();
-} \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/NativeWebRtcContextRegistry.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/NativeWebRtcContextRegistry.java
deleted file mode 100644
index 3d4f00a4f6..0000000000
--- a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/NativeWebRtcContextRegistry.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.webrtcdemo;
-
-import android.content.Context;
-
-public class NativeWebRtcContextRegistry {
- static {
- System.loadLibrary("webrtcdemo-jni");
- }
-
- public native void register(Context context);
- public native void unRegister();
-} \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/RtcpStatistics.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/RtcpStatistics.java
deleted file mode 100644
index dbe817b1af..0000000000
--- a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/RtcpStatistics.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.webrtcdemo;
-
-public class RtcpStatistics {
- // Definition of fraction lost can be found in RFC3550.
- // It is equivalent to taking the integer part after multiplying the loss
- // fraction by 256.
- public final int fractionLost;
- public final int cumulativeLost;
- public final int extendedMax;
- public final int jitter;
- public final int rttMs;
-
- // Only allowed to be created by the native layer.
- private RtcpStatistics(int fractionLost, int cumulativeLost, int extendedMax,
- int jitter, int rttMs) {
- this.fractionLost = fractionLost;
- this.cumulativeLost = cumulativeLost;
- this.extendedMax = extendedMax;
- this.jitter = jitter;
- this.rttMs = rttMs;
- }
-} \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SettingsMenuFragment.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SettingsMenuFragment.java
deleted file mode 100644
index 761f96ce29..0000000000
--- a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SettingsMenuFragment.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.webrtcdemo;
-
-import android.app.Activity;
-import android.app.Fragment;
-import android.os.Bundle;
-import android.util.Log;
-import android.view.LayoutInflater;
-import android.view.View;
-import android.view.ViewGroup;
-import android.widget.CheckBox;
-import android.widget.EditText;
-import android.widget.RadioGroup;
-import android.widget.TextView;
-import java.net.InetAddress;
-import java.net.NetworkInterface;
-import java.net.SocketException;
-import java.util.Enumeration;
-
-public class SettingsMenuFragment extends Fragment
- implements RadioGroup.OnCheckedChangeListener {
-
- private String TAG;
- private MenuStateProvider stateProvider;
-
- EditText etRemoteIp;
-
- @Override
- public View onCreateView(LayoutInflater inflater, ViewGroup container,
- Bundle savedInstanceState) {
- View v = inflater.inflate(R.layout.settingsmenu, container, false);
-
- TAG = getResources().getString(R.string.tag);
-
- CheckBox cbAudio = (CheckBox) v.findViewById(R.id.cbAudio);
- cbAudio.setChecked(getEngine().audioEnabled());
- cbAudio.setOnClickListener(new View.OnClickListener() {
- public void onClick(View checkBox) {
- CheckBox cbAudio = (CheckBox) checkBox;
- getEngine().setAudio(cbAudio.isChecked());
- cbAudio.setChecked(getEngine().audioEnabled());
- }
- });
- boolean loopback =
- getResources().getBoolean(R.bool.loopback_enabled_default);
- CheckBox cbLoopback = (CheckBox) v.findViewById(R.id.cbLoopback);
- cbLoopback.setChecked(loopback);
- cbLoopback.setOnClickListener(new View.OnClickListener() {
- public void onClick(View checkBox) {
- loopbackChanged((CheckBox) checkBox);
- }
- });
- etRemoteIp = (EditText) v.findViewById(R.id.etRemoteIp);
- etRemoteIp.setOnFocusChangeListener(new View.OnFocusChangeListener() {
- public void onFocusChange(View editText, boolean hasFocus) {
- if (!hasFocus) {
- getEngine().setRemoteIp(etRemoteIp.getText().toString());
- }
- }
- });
- // Has to be after remote IP as loopback changes it.
- loopbackChanged(cbLoopback);
- return v;
- }
-
- @Override
- public void onAttach(Activity activity) {
- super.onAttach(activity);
-
- // This makes sure that the container activity has implemented
- // the callback interface. If not, it throws an exception.
- try {
- stateProvider = (MenuStateProvider) activity;
- } catch (ClassCastException e) {
- throw new ClassCastException(activity +
- " must implement MenuStateProvider");
- }
- }
-
- private void loopbackChanged(CheckBox cbLoopback) {
- boolean loopback = cbLoopback.isChecked();
- etRemoteIp.setText(loopback ? getLoopbackIPString() : getLocalIpAddress());
- getEngine().setRemoteIp(etRemoteIp.getText().toString());
- }
-
- private String getLoopbackIPString() {
- return getResources().getString(R.string.loopbackIp);
- }
-
- private String getLocalIpAddress() {
- String localIp = "";
- try {
- for (Enumeration<NetworkInterface> en = NetworkInterface
- .getNetworkInterfaces(); en.hasMoreElements();) {
- NetworkInterface intf = en.nextElement();
- for (Enumeration<InetAddress> enumIpAddr =
- intf.getInetAddresses();
- enumIpAddr.hasMoreElements(); ) {
- InetAddress inetAddress = enumIpAddr.nextElement();
- if (!inetAddress.isLoopbackAddress()) {
- // Set the remote ip address the same as
- // the local ip address of the last netif
- localIp = inetAddress.getHostAddress().toString();
- }
- }
- }
- } catch (SocketException e) {
- Log.e(TAG, "Unable to get local IP address. Not the end of the world", e);
- }
- return localIp;
- }
-
- private MediaEngine getEngine() {
- return stateProvider.getEngine();
- }
-
- @Override
- public void onCheckedChanged(RadioGroup group, int checkedId) {
- }
-} \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SpinnerAdapter.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SpinnerAdapter.java
deleted file mode 100644
index fb04a7aac3..0000000000
--- a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SpinnerAdapter.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.webrtcdemo;
-
-import android.widget.ArrayAdapter;
-import android.content.Context;
-import android.widget.TextView;
-import android.view.View;
-import android.view.ViewGroup;
-import android.view.LayoutInflater;
-
-public class SpinnerAdapter extends ArrayAdapter<String> {
- private String[] menuItems;
- LayoutInflater inflater;
- int textViewResourceId;
-
- public SpinnerAdapter(Context context, int textViewResourceId,
- String[] objects, LayoutInflater inflater) {
- super(context, textViewResourceId, objects);
- menuItems = objects;
- this.inflater = inflater;
- this.textViewResourceId = textViewResourceId;
- }
-
- @Override public View getDropDownView(int position, View convertView,
- ViewGroup parent) {
- return getCustomView(position, convertView, parent);
- }
-
- @Override public View getView(int position, View convertView,
- ViewGroup parent) {
- return getCustomView(position, convertView, parent);
- }
-
- private View getCustomView(int position, View v, ViewGroup parent) {
- View row = inflater.inflate(textViewResourceId, parent, false);
- TextView label = (TextView) row.findViewById(R.id.spinner_row);
- label.setText(menuItems[position]);
- return row;
- }
-} \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VoiceEngine.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VoiceEngine.java
deleted file mode 100644
index 900355ad8e..0000000000
--- a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VoiceEngine.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.webrtcdemo;
-
-public class VoiceEngine {
- private final long nativeVoiceEngine;
-
- // Keep in sync (including this comment) with
- // webrtc/common_types.h:NsModes
- public enum NsModes {
- UNCHANGED, DEFAULT, CONFERENCE, LOW_SUPPRESSION,
- MODERATE_SUPPRESSION, HIGH_SUPPRESSION, VERY_HIGH_SUPPRESSION
- }
-
- // Keep in sync (including this comment) with
- // webrtc/common_types.h:AgcModes
- public enum AgcModes {
- UNCHANGED, DEFAULT, ADAPTIVE_ANALOG, ADAPTIVE_DIGITAL,
- FIXED_DIGITAL
- }
-
- // Keep in sync (including this comment) with
- // webrtc/common_types.h:AecmModes
- public enum AecmModes {
- QUIET_EARPIECE_OR_HEADSET, EARPIECE, LOUD_EARPIECE,
- SPEAKERPHONE, LOUD_SPEAKERPHONE
- }
-
- // Keep in sync (including this comment) with
- // webrtc/common_types.h:EcModes
- public enum EcModes { UNCHANGED, DEFAULT, CONFERENCE, AEC, AECM }
-
- // Keep in sync (including this comment) with
- // webrtc/common_types.h:RtpDirections
- public enum RtpDirections { INCOMING, OUTGOING }
-
- public static class AgcConfig {
- AgcConfig(int targetLevelDbOv, int digitalCompressionGaindB,
- boolean limiterEnable) {
- this.targetLevelDbOv = targetLevelDbOv;
- this.digitalCompressionGaindB = digitalCompressionGaindB;
- this.limiterEnable = limiterEnable;
- }
- private final int targetLevelDbOv;
- private final int digitalCompressionGaindB;
- private final boolean limiterEnable;
- }
-
- public VoiceEngine() {
- nativeVoiceEngine = create();
- }
- private static native long create();
- public native int init();
- public native void dispose();
- public native int createChannel();
- public native int deleteChannel(int channel);
- public native int setLocalReceiver(int channel, int port);
- public native int setSendDestination(int channel, int port, String ipaddr);
- public native int startListen(int channel);
- public native int startPlayout(int channel);
- public native int startSend(int channel);
- public native int stopListen(int channel);
- public native int stopPlayout(int channel);
- public native int stopSend(int channel);
- public native int setSpeakerVolume(int volume);
- public native int setLoudspeakerStatus(boolean enable);
- public native int startPlayingFileLocally(
- int channel,
- String fileName,
- boolean loop);
- public native int stopPlayingFileLocally(int channel);
- public native int startPlayingFileAsMicrophone(
- int channel,
- String fileName,
- boolean loop);
- public native int stopPlayingFileAsMicrophone(int channel);
- public native int numOfCodecs();
- public native CodecInst getCodec(int index);
- public native int setSendCodec(int channel, CodecInst codec);
- public int setEcStatus(boolean enable, EcModes mode) {
- return setEcStatus(enable, mode.ordinal());
- }
- private native int setEcStatus(boolean enable, int ec_mode);
- public int setAecmMode(AecmModes aecm_mode, boolean cng) {
- return setAecmMode(aecm_mode.ordinal(), cng);
- }
- private native int setAecmMode(int aecm_mode, boolean cng);
- public int setAgcStatus(boolean enable, AgcModes agc_mode) {
- return setAgcStatus(enable, agc_mode.ordinal());
- }
- private native int setAgcStatus(boolean enable, int agc_mode);
- public native int setAgcConfig(AgcConfig agc_config);
- public int setNsStatus(boolean enable, NsModes ns_mode) {
- return setNsStatus(enable, ns_mode.ordinal());
- }
- private native int setNsStatus(boolean enable, int ns_mode);
- public native int startDebugRecording(String file);
- public native int stopDebugRecording();
- public int startRtpDump(int channel, String file,
- RtpDirections direction) {
- return startRtpDump(channel, file, direction.ordinal());
- }
- private native int startRtpDump(int channel, String file,
- int direction);
- public int stopRtpDump(int channel, RtpDirections direction) {
- return stopRtpDump(channel, direction.ordinal());
- }
- private native int stopRtpDump(int channel, int direction);
-} \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/WebRTCDemo.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/WebRTCDemo.java
deleted file mode 100644
index 3b972cf126..0000000000
--- a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/WebRTCDemo.java
+++ /dev/null
@@ -1,210 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.webrtcdemo;
-
-import android.app.ActionBar.Tab;
-import android.app.ActionBar.TabListener;
-import android.app.ActionBar;
-import android.app.Activity;
-import android.app.Fragment;
-import android.app.FragmentTransaction;
-import android.content.pm.ActivityInfo;
-import android.media.AudioManager;
-import android.os.Bundle;
-import android.os.Handler;
-import android.view.KeyEvent;
-import android.view.Menu;
-import android.view.MenuInflater;
-import android.view.MenuItem;
-import android.view.WindowManager;
-
-public class WebRTCDemo extends Activity implements MenuStateProvider {
-
- // From http://developer.android.com/guide/topics/ui/actionbar.html
- public static class TabListener<T extends Fragment>
- implements ActionBar.TabListener {
- private Fragment fragment;
- private final Activity activity;
- private final String tag;
- private final Class<T> instance;
- private final Bundle args;
-
- public TabListener(Activity activity, String tag, Class<T> clz) {
- this(activity, tag, clz, null);
- }
-
- public TabListener(Activity activity, String tag, Class<T> clz,
- Bundle args) {
- this.activity = activity;
- this.tag = tag;
- this.instance = clz;
- this.args = args;
- }
-
- public void onTabSelected(Tab tab, FragmentTransaction ft) {
- // Check if the fragment is already initialized
- if (fragment == null) {
- // If not, instantiate and add it to the activity
- fragment = Fragment.instantiate(activity, instance.getName(), args);
- ft.add(android.R.id.content, fragment, tag);
- } else {
- // If it exists, simply attach it in order to show it
- ft.attach(fragment);
- }
- }
-
- public void onTabUnselected(Tab tab, FragmentTransaction ft) {
- if (fragment != null) {
- // Detach the fragment, because another one is being attached
- ft.detach(fragment);
- }
- }
-
- public void onTabReselected(Tab tab, FragmentTransaction ft) {
- // User selected the already selected tab. Do nothing.
- }
- }
-
- private NativeWebRtcContextRegistry contextRegistry = null;
- private MediaEngine mediaEngine = null;
- private Handler handler;
- public MediaEngine getEngine() { return mediaEngine; }
-
- @Override
- public void onCreate(Bundle savedInstanceState) {
- super.onCreate(savedInstanceState);
-
- // Global settings.
- getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
- getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
-
- // State.
- // Must be instantiated before MediaEngine.
- contextRegistry = new NativeWebRtcContextRegistry();
- contextRegistry.register(this);
-
- // Load all settings dictated in xml.
- mediaEngine = new MediaEngine(this);
- mediaEngine.setRemoteIp(getResources().getString(R.string.loopbackIp));
-
- mediaEngine.setAudio(getResources().getBoolean(
- R.bool.audio_enabled_default));
- mediaEngine.setAudioCodec(mediaEngine.getIsacIndex());
- mediaEngine.setAudioRxPort(getResources().getInteger(
- R.integer.aRxPortDefault));
- mediaEngine.setAudioTxPort(getResources().getInteger(
- R.integer.aTxPortDefault));
- mediaEngine.setSpeaker(getResources().getBoolean(
- R.bool.speaker_enabled_default));
- mediaEngine.setDebuging(getResources().getBoolean(
- R.bool.apm_debug_enabled_default));
-
- // Create action bar with all tabs.
- ActionBar actionBar = getActionBar();
- actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_TABS);
- actionBar.setDisplayShowTitleEnabled(false);
-
- Tab tab = actionBar.newTab()
- .setText("Main")
- .setTabListener(new TabListener<MainMenuFragment>(
- this, "main", MainMenuFragment.class));
- actionBar.addTab(tab);
-
- tab = actionBar.newTab()
- .setText("Settings")
- .setTabListener(new TabListener<SettingsMenuFragment>(
- this, "Settings", SettingsMenuFragment.class));
- actionBar.addTab(tab);
-
- tab = actionBar.newTab()
- .setText("Audio")
- .setTabListener(new TabListener<AudioMenuFragment>(
- this, "Audio", AudioMenuFragment.class));
- actionBar.addTab(tab);
-
- enableTimedStartStop();
-
- // Hint that voice call audio stream should be used for hardware volume
- // controls.
- setVolumeControlStream(AudioManager.STREAM_VOICE_CALL);
- }
-
- @Override
- public boolean onCreateOptionsMenu(Menu menu) {
- MenuInflater inflater = getMenuInflater();
- inflater.inflate(R.menu.main_activity_actions, menu);
- return super.onCreateOptionsMenu(menu);
- }
-
- @Override
- public boolean onOptionsItemSelected(MenuItem item) {
- // Handle presses on the action bar items
- switch (item.getItemId()) {
- case R.id.action_exit:
- MainMenuFragment main = (MainMenuFragment)getFragmentManager()
- .findFragmentByTag("main");
- main.stopAll();
- finish();
- return true;
- default:
- return super.onOptionsItemSelected(item);
- }
- }
-
- @Override
- public void onDestroy() {
- disableTimedStartStop();
- mediaEngine.dispose();
- contextRegistry.unRegister();
- super.onDestroy();
- }
-
- @Override
- public boolean onKeyDown(int keyCode, KeyEvent event) {
- if (keyCode == KeyEvent.KEYCODE_BACK) {
- // Prevent app from running in the background.
- MainMenuFragment main = (MainMenuFragment)getFragmentManager()
- .findFragmentByTag("main");
- main.stopAll();
- finish();
- return true;
- }
- return super.onKeyDown(keyCode, event);
- }
-
- private int getCallRestartPeriodicity() {
- return getResources().getInteger(R.integer.call_restart_periodicity_ms);
- }
-
- // Thread repeatedly calling start/stop.
- void enableTimedStartStop() {
- if (getCallRestartPeriodicity() > 0) {
- // Periodicity == 0 <-> Disabled.
- handler = new Handler();
- handler.postDelayed(startOrStopCallback, getCallRestartPeriodicity());
- }
- }
-
- void disableTimedStartStop() {
- if (handler != null) {
- handler.removeCallbacks(startOrStopCallback);
- }
- }
-
- private Runnable startOrStopCallback = new Runnable() {
- public void run() {
- MainMenuFragment main = (MainMenuFragment)getFragmentManager()
- .findFragmentByTag("main");
- main.toggleStart();
- handler.postDelayed(startOrStopCallback, getCallRestartPeriodicity());
- }
- };
-}
diff --git a/webrtc/examples/androidapp/AndroidManifest.xml b/webrtc/examples/androidapp/AndroidManifest.xml
index 6a91cfdeed..bd0dee821a 100644
--- a/webrtc/examples/androidapp/AndroidManifest.xml
+++ b/webrtc/examples/androidapp/AndroidManifest.xml
@@ -7,7 +7,7 @@
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
<uses-feature android:glEsVersion="0x00020000" android:required="true" />
- <uses-sdk android:minSdkVersion="14" android:targetSdkVersion="21" />
+ <uses-sdk android:minSdkVersion="16" android:targetSdkVersion="21" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
diff --git a/webrtc/examples/androidapp/res/values/strings.xml b/webrtc/examples/androidapp/res/values/strings.xml
index b3c55b438e..a5f64bad5b 100644
--- a/webrtc/examples/androidapp/res/values/strings.xml
+++ b/webrtc/examples/androidapp/res/values/strings.xml
@@ -71,6 +71,11 @@
<string name="pref_hwcodec_dlg">Use hardware accelerated video codec (if available).</string>
<string name="pref_hwcodec_default">true</string>
+ <string name="pref_capturetotexture_key">capturetotexture_preference</string>
+ <string name="pref_capturetotexture_title">Video capture to surface texture.</string>
+ <string name="pref_capturetotexture_dlg">Capture video to textures (if available).</string>
+ <string name="pref_capturetotexture_default">false</string>
+
<string name="pref_value_enabled">Enabled</string>
<string name="pref_value_disabled">Disabled</string>
@@ -97,14 +102,19 @@
<string name="pref_noaudioprocessing_dlg">Disable audio processing pipeline.</string>
<string name="pref_noaudioprocessing_default">false</string>
+ <string name="pref_aecdump_key">aecdump_preference</string>
+ <string name="pref_aecdump_title">Create aecdump.</string>
+ <string name="pref_aecdump_dlg">Enable diagnostic audio recordings.</string>
+ <string name="pref_aecdump_default">false</string>
+
+ <string name="pref_opensles_key">opensles_preference</string>
+ <string name="pref_opensles_title">Use OpenSL ES for audio playback.</string>
+ <string name="pref_opensles_dlg">Use OpenSL ES for audio playback.</string>
+ <string name="pref_opensles_default">false</string>
+
<string name="pref_miscsettings_key">misc_settings_key</string>
<string name="pref_miscsettings_title">Miscellaneous settings.</string>
- <string name="pref_cpu_usage_detection_key">cpu_usage_detection</string>
- <string name="pref_cpu_usage_detection_title">CPU overuse detection.</string>
- <string name="pref_cpu_usage_detection_dlg">Adapt transmission to CPU status.</string>
- <string name="pref_cpu_usage_detection_default" translatable="false">true</string>
-
<string name="pref_room_server_url_key">room_server_url_preference</string>
<string name="pref_room_server_url_title">Room server URL.</string>
<string name="pref_room_server_url_dlg">Enter a room server URL.</string>
@@ -115,4 +125,9 @@
<string name="pref_displayhud_dlg">Display call statistics.</string>
<string name="pref_displayhud_default" translatable="false">false</string>
+ <string name="pref_tracing_key">tracing_preference</string>
+ <string name="pref_tracing_title">Debug performance tracing.</string>
+ <string name="pref_tracing_dlg">Debug performance tracing.</string>
+ <string name="pref_tracing_default" translatable="false">false</string>
+
</resources>
diff --git a/webrtc/examples/androidapp/res/xml/preferences.xml b/webrtc/examples/androidapp/res/xml/preferences.xml
index c580e0cb77..0c6f916a2d 100644
--- a/webrtc/examples/androidapp/res/xml/preferences.xml
+++ b/webrtc/examples/androidapp/res/xml/preferences.xml
@@ -60,6 +60,12 @@
android:title="@string/pref_hwcodec_title"
android:dialogTitle="@string/pref_hwcodec_dlg"
android:defaultValue="@string/pref_hwcodec_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_capturetotexture_key"
+ android:title="@string/pref_capturetotexture_title"
+ android:dialogTitle="@string/pref_capturetotexture_dlg"
+ android:defaultValue="@string/pref_capturetotexture_default" />
</PreferenceCategory>
<PreferenceCategory
@@ -94,18 +100,24 @@
android:title="@string/pref_noaudioprocessing_title"
android:dialogTitle="@string/pref_noaudioprocessing_dlg"
android:defaultValue="@string/pref_noaudioprocessing_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_aecdump_key"
+ android:title="@string/pref_aecdump_title"
+ android:dialogTitle="@string/pref_aecdump_dlg"
+ android:defaultValue="@string/pref_aecdump_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_opensles_key"
+ android:title="@string/pref_opensles_title"
+ android:dialogTitle="@string/pref_opensles_dlg"
+ android:defaultValue="@string/pref_opensles_default" />
</PreferenceCategory>
<PreferenceCategory
android:key="@string/pref_miscsettings_key"
android:title="@string/pref_miscsettings_title">
- <CheckBoxPreference
- android:key="@string/pref_cpu_usage_detection_key"
- android:title="@string/pref_cpu_usage_detection_title"
- android:dialogTitle="@string/pref_cpu_usage_detection_dlg"
- android:defaultValue="@string/pref_cpu_usage_detection_default" />
-
<EditTextPreference
android:key="@string/pref_room_server_url_key"
android:title="@string/pref_room_server_url_title"
@@ -118,6 +130,12 @@
android:title="@string/pref_displayhud_title"
android:dialogTitle="@string/pref_displayhud_dlg"
android:defaultValue="@string/pref_displayhud_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_tracing_key"
+ android:title="@string/pref_tracing_title"
+ android:dialogTitle="@string/pref_tracing_dlg"
+ android:defaultValue="@string/pref_tracing_default" />
</PreferenceCategory>
</PreferenceScreen>
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/CallActivity.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/CallActivity.java
index 8ae7981e36..b9abf11eb3 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/CallActivity.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/CallActivity.java
@@ -22,7 +22,6 @@ import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.net.Uri;
-import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.os.Handler;
import android.util.Log;
@@ -67,16 +66,21 @@ public class CallActivity extends Activity
"org.appspot.apprtc.VIDEOCODEC";
public static final String EXTRA_HWCODEC_ENABLED =
"org.appspot.apprtc.HWCODEC";
+ public static final String EXTRA_CAPTURETOTEXTURE_ENABLED =
+ "org.appspot.apprtc.CAPTURETOTEXTURE";
public static final String EXTRA_AUDIO_BITRATE =
"org.appspot.apprtc.AUDIO_BITRATE";
public static final String EXTRA_AUDIOCODEC =
"org.appspot.apprtc.AUDIOCODEC";
public static final String EXTRA_NOAUDIOPROCESSING_ENABLED =
"org.appspot.apprtc.NOAUDIOPROCESSING";
- public static final String EXTRA_CPUOVERUSE_DETECTION =
- "org.appspot.apprtc.CPUOVERUSE_DETECTION";
+ public static final String EXTRA_AECDUMP_ENABLED =
+ "org.appspot.apprtc.AECDUMP";
+ public static final String EXTRA_OPENSLES_ENABLED =
+ "org.appspot.apprtc.OPENSLES";
public static final String EXTRA_DISPLAY_HUD =
"org.appspot.apprtc.DISPLAY_HUD";
+ public static final String EXTRA_TRACING = "org.appspot.apprtc.TRACING";
public static final String EXTRA_CMDLINE =
"org.appspot.apprtc.CMDLINE";
public static final String EXTRA_RUNTIME =
@@ -177,9 +181,9 @@ public class CallActivity extends Activity
remoteRender.setOnClickListener(listener);
// Create video renderers.
- rootEglBase = new EglBase();
- localRender.init(rootEglBase.getContext(), null);
- remoteRender.init(rootEglBase.getContext(), null);
+ rootEglBase = EglBase.create();
+ localRender.init(rootEglBase.getEglBaseContext(), null);
+ remoteRender.init(rootEglBase.getEglBaseContext(), null);
localRender.setZOrderMediaOverlay(true);
updateVideoView();
@@ -212,19 +216,23 @@ public class CallActivity extends Activity
return;
}
boolean loopback = intent.getBooleanExtra(EXTRA_LOOPBACK, false);
+ boolean tracing = intent.getBooleanExtra(EXTRA_TRACING, false);
peerConnectionParameters = new PeerConnectionParameters(
intent.getBooleanExtra(EXTRA_VIDEO_CALL, true),
loopback,
+ tracing,
intent.getIntExtra(EXTRA_VIDEO_WIDTH, 0),
intent.getIntExtra(EXTRA_VIDEO_HEIGHT, 0),
intent.getIntExtra(EXTRA_VIDEO_FPS, 0),
intent.getIntExtra(EXTRA_VIDEO_BITRATE, 0),
intent.getStringExtra(EXTRA_VIDEOCODEC),
intent.getBooleanExtra(EXTRA_HWCODEC_ENABLED, true),
+ intent.getBooleanExtra(EXTRA_CAPTURETOTEXTURE_ENABLED, false),
intent.getIntExtra(EXTRA_AUDIO_BITRATE, 0),
intent.getStringExtra(EXTRA_AUDIOCODEC),
intent.getBooleanExtra(EXTRA_NOAUDIOPROCESSING_ENABLED, false),
- intent.getBooleanExtra(EXTRA_CPUOVERUSE_DETECTION, true));
+ intent.getBooleanExtra(EXTRA_AECDUMP_ENABLED, false),
+ intent.getBooleanExtra(EXTRA_OPENSLES_ENABLED, false));
commandLineRun = intent.getBooleanExtra(EXTRA_CMDLINE, false);
runTimeMs = intent.getIntExtra(EXTRA_RUNTIME, 0);
@@ -246,6 +254,7 @@ public class CallActivity extends Activity
// For command line execution run connection for <runTimeMs> and exit.
if (commandLineRun && runTimeMs > 0) {
(new Handler()).postDelayed(new Runnable() {
+ @Override
public void run() {
disconnect();
}
@@ -480,7 +489,7 @@ public class CallActivity extends Activity
signalingParameters = params;
logAndToast("Creating peer connection, delay=" + delta + "ms");
- peerConnectionClient.createPeerConnection(rootEglBase.getContext(),
+ peerConnectionClient.createPeerConnection(rootEglBase.getEglBaseContext(),
localRender, remoteRender, signalingParameters);
if (signalingParameters.initiator) {
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java
index 0bdaebb5b0..e55dba0cba 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java
@@ -65,9 +65,12 @@ public class ConnectActivity extends Activity {
private String keyprefAudioBitrateValue;
private String keyprefAudioCodec;
private String keyprefHwCodecAcceleration;
+ private String keyprefCaptureToTexture;
private String keyprefNoAudioProcessingPipeline;
- private String keyprefCpuUsageDetection;
+ private String keyprefAecDump;
+ private String keyprefOpenSLES;
private String keyprefDisplayHud;
+ private String keyprefTracing;
private String keyprefRoomServerUrl;
private String keyprefRoom;
private String keyprefRoomList;
@@ -89,12 +92,15 @@ public class ConnectActivity extends Activity {
keyprefVideoBitrateValue = getString(R.string.pref_startvideobitratevalue_key);
keyprefVideoCodec = getString(R.string.pref_videocodec_key);
keyprefHwCodecAcceleration = getString(R.string.pref_hwcodec_key);
+ keyprefCaptureToTexture = getString(R.string.pref_capturetotexture_key);
keyprefAudioBitrateType = getString(R.string.pref_startaudiobitrate_key);
keyprefAudioBitrateValue = getString(R.string.pref_startaudiobitratevalue_key);
keyprefAudioCodec = getString(R.string.pref_audiocodec_key);
keyprefNoAudioProcessingPipeline = getString(R.string.pref_noaudioprocessing_key);
- keyprefCpuUsageDetection = getString(R.string.pref_cpu_usage_detection_key);
+ keyprefAecDump = getString(R.string.pref_aecdump_key);
+ keyprefOpenSLES = getString(R.string.pref_opensles_key);
keyprefDisplayHud = getString(R.string.pref_displayhud_key);
+ keyprefTracing = getString(R.string.pref_tracing_key);
keyprefRoomServerUrl = getString(R.string.pref_room_server_url_key);
keyprefRoom = getString(R.string.pref_room_key);
keyprefRoomList = getString(R.string.pref_room_list_key);
@@ -253,11 +259,25 @@ public class ConnectActivity extends Activity {
boolean hwCodec = sharedPref.getBoolean(keyprefHwCodecAcceleration,
Boolean.valueOf(getString(R.string.pref_hwcodec_default)));
+ // Check Capture to texture.
+ boolean captureToTexture = sharedPref.getBoolean(keyprefCaptureToTexture,
+ Boolean.valueOf(getString(R.string.pref_capturetotexture_default)));
+
// Check Disable Audio Processing flag.
boolean noAudioProcessing = sharedPref.getBoolean(
keyprefNoAudioProcessingPipeline,
Boolean.valueOf(getString(R.string.pref_noaudioprocessing_default)));
+ // Check AEC dump flag.
+ boolean aecDump = sharedPref.getBoolean(
+ keyprefAecDump,
+ Boolean.valueOf(getString(R.string.pref_aecdump_default)));
+
+ // Check OpenSL ES enabled flag.
+ boolean useOpenSLES = sharedPref.getBoolean(
+ keyprefOpenSLES,
+ Boolean.valueOf(getString(R.string.pref_opensles_default)));
+
// Get video resolution from settings.
int videoWidth = 0;
int videoHeight = 0;
@@ -313,16 +333,13 @@ public class ConnectActivity extends Activity {
audioStartBitrate = Integer.parseInt(bitrateValue);
}
- // Test if CpuOveruseDetection should be disabled. By default is on.
- boolean cpuOveruseDetection = sharedPref.getBoolean(
- keyprefCpuUsageDetection,
- Boolean.valueOf(
- getString(R.string.pref_cpu_usage_detection_default)));
-
// Check statistics display option.
boolean displayHud = sharedPref.getBoolean(keyprefDisplayHud,
Boolean.valueOf(getString(R.string.pref_displayhud_default)));
+ boolean tracing = sharedPref.getBoolean(
+ keyprefTracing, Boolean.valueOf(getString(R.string.pref_tracing_default)));
+
// Start AppRTCDemo activity.
Log.d(TAG, "Connecting to room " + roomId + " at URL " + roomUrl);
if (validateUrl(roomUrl)) {
@@ -340,13 +357,15 @@ public class ConnectActivity extends Activity {
intent.putExtra(CallActivity.EXTRA_VIDEO_BITRATE, videoStartBitrate);
intent.putExtra(CallActivity.EXTRA_VIDEOCODEC, videoCodec);
intent.putExtra(CallActivity.EXTRA_HWCODEC_ENABLED, hwCodec);
+ intent.putExtra(CallActivity.EXTRA_CAPTURETOTEXTURE_ENABLED, captureToTexture);
intent.putExtra(CallActivity.EXTRA_NOAUDIOPROCESSING_ENABLED,
noAudioProcessing);
+ intent.putExtra(CallActivity.EXTRA_AECDUMP_ENABLED, aecDump);
+ intent.putExtra(CallActivity.EXTRA_OPENSLES_ENABLED, useOpenSLES);
intent.putExtra(CallActivity.EXTRA_AUDIO_BITRATE, audioStartBitrate);
intent.putExtra(CallActivity.EXTRA_AUDIOCODEC, audioCodec);
- intent.putExtra(CallActivity.EXTRA_CPUOVERUSE_DETECTION,
- cpuOveruseDetection);
intent.putExtra(CallActivity.EXTRA_DISPLAY_HUD, displayHud);
+ intent.putExtra(CallActivity.EXTRA_TRACING, tracing);
intent.putExtra(CallActivity.EXTRA_CMDLINE, commandLineRun);
intent.putExtra(CallActivity.EXTRA_RUNTIME, runTimeMs);
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java
index 263046b2e9..c41dd66345 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java
@@ -11,12 +11,15 @@
package org.appspot.apprtc;
import android.content.Context;
+import android.os.ParcelFileDescriptor;
+import android.os.Environment;
import android.util.Log;
import org.appspot.apprtc.AppRTCClient.SignalingParameters;
import org.appspot.apprtc.util.LooperExecutor;
import org.webrtc.CameraEnumerationAndroid;
import org.webrtc.DataChannel;
+import org.webrtc.EglBase;
import org.webrtc.IceCandidate;
import org.webrtc.Logging;
import org.webrtc.MediaCodecVideoEncoder;
@@ -34,7 +37,10 @@ import org.webrtc.VideoCapturerAndroid;
import org.webrtc.VideoRenderer;
import org.webrtc.VideoSource;
import org.webrtc.VideoTrack;
+import org.webrtc.voiceengine.WebRtcAudioManager;
+import java.io.File;
+import java.io.IOException;
import java.util.EnumSet;
import java.util.LinkedList;
import java.util.Timer;
@@ -42,8 +48,6 @@ import java.util.TimerTask;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import javax.microedition.khronos.egl.EGLContext;
-
/**
* Peer connection client implementation.
*
@@ -55,7 +59,6 @@ public class PeerConnectionClient {
public static final String VIDEO_TRACK_ID = "ARDAMSv0";
public static final String AUDIO_TRACK_ID = "ARDAMSa0";
private static final String TAG = "PCRTCClient";
- private static final String FIELD_TRIAL_VP9 = "WebRTC-SupportVP9/Enabled/";
private static final String FIELD_TRIAL_AUTOMATIC_RESIZE =
"WebRTC-MediaCodecVideoEncoder-AutomaticResize/Enabled/";
private static final String VIDEO_CODEC_VP8 = "VP8";
@@ -94,7 +97,7 @@ public class PeerConnectionClient {
private VideoSource videoSource;
private boolean videoCallEnabled;
private boolean preferIsac;
- private boolean preferH264;
+ private String preferredVideoCodec;
private boolean videoSourceStopped;
private boolean isError;
private Timer statsTimer;
@@ -104,6 +107,7 @@ public class PeerConnectionClient {
private MediaConstraints pcConstraints;
private MediaConstraints videoConstraints;
private MediaConstraints audioConstraints;
+ private ParcelFileDescriptor aecDumpFileDescriptor;
private MediaConstraints sdpMediaConstraints;
private PeerConnectionParameters peerConnectionParameters;
// Queued remote ICE candidates are consumed only after both local and
@@ -127,35 +131,41 @@ public class PeerConnectionClient {
public static class PeerConnectionParameters {
public final boolean videoCallEnabled;
public final boolean loopback;
+ public final boolean tracing;
public final int videoWidth;
public final int videoHeight;
public final int videoFps;
public final int videoStartBitrate;
public final String videoCodec;
public final boolean videoCodecHwAcceleration;
+ public final boolean captureToTexture;
public final int audioStartBitrate;
public final String audioCodec;
public final boolean noAudioProcessing;
- public final boolean cpuOveruseDetection;
+ public final boolean aecDump;
+ public final boolean useOpenSLES;
public PeerConnectionParameters(
- boolean videoCallEnabled, boolean loopback,
+ boolean videoCallEnabled, boolean loopback, boolean tracing,
int videoWidth, int videoHeight, int videoFps, int videoStartBitrate,
- String videoCodec, boolean videoCodecHwAcceleration,
+ String videoCodec, boolean videoCodecHwAcceleration, boolean captureToTexture,
int audioStartBitrate, String audioCodec,
- boolean noAudioProcessing, boolean cpuOveruseDetection) {
+ boolean noAudioProcessing, boolean aecDump, boolean useOpenSLES) {
this.videoCallEnabled = videoCallEnabled;
this.loopback = loopback;
+ this.tracing = tracing;
this.videoWidth = videoWidth;
this.videoHeight = videoHeight;
this.videoFps = videoFps;
this.videoStartBitrate = videoStartBitrate;
this.videoCodec = videoCodec;
this.videoCodecHwAcceleration = videoCodecHwAcceleration;
+ this.captureToTexture = captureToTexture;
this.audioStartBitrate = audioStartBitrate;
this.audioCodec = audioCodec;
this.noAudioProcessing = noAudioProcessing;
- this.cpuOveruseDetection = cpuOveruseDetection;
+ this.aecDump = aecDump;
+ this.useOpenSLES = useOpenSLES;
}
}
@@ -228,7 +238,6 @@ public class PeerConnectionClient {
factory = null;
peerConnection = null;
preferIsac = false;
- preferH264 = false;
videoSourceStopped = false;
isError = false;
queuedRemoteCandidates = null;
@@ -249,7 +258,7 @@ public class PeerConnectionClient {
}
public void createPeerConnection(
- final EGLContext renderEGLContext,
+ final EglBase.Context renderEGLContext,
final VideoRenderer.Callbacks localRender,
final VideoRenderer.Callbacks remoteRender,
final SignalingParameters signalingParameters) {
@@ -283,31 +292,47 @@ public class PeerConnectionClient {
}
private void createPeerConnectionFactoryInternal(Context context) {
+ PeerConnectionFactory.initializeInternalTracer();
+ if (peerConnectionParameters.tracing) {
+ PeerConnectionFactory.startInternalTracingCapture(
+ Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator
+ + "webrtc-trace.txt");
+ }
Log.d(TAG, "Create peer connection factory. Use video: " +
peerConnectionParameters.videoCallEnabled);
isError = false;
// Initialize field trials.
- String field_trials = FIELD_TRIAL_AUTOMATIC_RESIZE;
- // Check if VP9 is used by default.
- if (videoCallEnabled && peerConnectionParameters.videoCodec != null
- && peerConnectionParameters.videoCodec.equals(VIDEO_CODEC_VP9)) {
- field_trials += FIELD_TRIAL_VP9;
+ PeerConnectionFactory.initializeFieldTrials(FIELD_TRIAL_AUTOMATIC_RESIZE);
+
+ // Check preferred video codec.
+ preferredVideoCodec = VIDEO_CODEC_VP8;
+ if (videoCallEnabled && peerConnectionParameters.videoCodec != null) {
+ if (peerConnectionParameters.videoCodec.equals(VIDEO_CODEC_VP9)) {
+ preferredVideoCodec = VIDEO_CODEC_VP9;
+ } else if (peerConnectionParameters.videoCodec.equals(VIDEO_CODEC_H264)) {
+ preferredVideoCodec = VIDEO_CODEC_H264;
+ }
}
- PeerConnectionFactory.initializeFieldTrials(field_trials);
+ Log.d(TAG, "Preferred video codec: " + preferredVideoCodec);
- // Check if H.264 is used by default.
- preferH264 = false;
- if (videoCallEnabled && peerConnectionParameters.videoCodec != null
- && peerConnectionParameters.videoCodec.equals(VIDEO_CODEC_H264)) {
- preferH264 = true;
- }
// Check if ISAC is used by default.
preferIsac = false;
if (peerConnectionParameters.audioCodec != null
&& peerConnectionParameters.audioCodec.equals(AUDIO_CODEC_ISAC)) {
preferIsac = true;
}
+
+ // Enable/disable OpenSL ES playback.
+ if (!peerConnectionParameters.useOpenSLES) {
+ Log.d(TAG, "Disable OpenSL ES audio even if device supports it");
+ WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(true /* enable */);
+ } else {
+ Log.d(TAG, "Allow OpenSL ES audio if device supports it");
+ WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(false);
+ }
+
+ // Create peer connection factory.
if (!PeerConnectionFactory.initializeAndroidGlobals(context, true, true,
peerConnectionParameters.videoCodecHwAcceleration)) {
events.onPeerConnectionError("Failed to initializeAndroidGlobals");
@@ -405,7 +430,7 @@ public class PeerConnectionClient {
}
}
- private void createPeerConnectionInternal(EGLContext renderEGLContext) {
+ private void createPeerConnectionInternal(EglBase.Context renderEGLContext) {
if (factory == null || isError) {
Log.e(TAG, "Peerconnection factory is not created");
return;
@@ -420,7 +445,7 @@ public class PeerConnectionClient {
if (videoCallEnabled) {
Log.d(TAG, "EGLContext: " + renderEGLContext);
- factory.setVideoHwAccelerationOptions(renderEGLContext);
+ factory.setVideoHwAccelerationOptions(renderEGLContext, renderEGLContext);
}
PeerConnection.RTCConfiguration rtcConfig =
@@ -453,7 +478,8 @@ public class PeerConnectionClient {
cameraDeviceName = frontCameraDeviceName;
}
Log.d(TAG, "Opening camera: " + cameraDeviceName);
- videoCapturer = VideoCapturerAndroid.create(cameraDeviceName, null);
+ videoCapturer = VideoCapturerAndroid.create(cameraDeviceName, null,
+ peerConnectionParameters.captureToTexture ? renderEGLContext : null);
if (videoCapturer == null) {
reportError("Failed to open camera");
return;
@@ -466,10 +492,26 @@ public class PeerConnectionClient {
factory.createAudioSource(audioConstraints)));
peerConnection.addStream(mediaStream);
+ if (peerConnectionParameters.aecDump) {
+ try {
+ aecDumpFileDescriptor = ParcelFileDescriptor.open(
+ new File("/sdcard/Download/audio.aecdump"),
+ ParcelFileDescriptor.MODE_READ_WRITE |
+ ParcelFileDescriptor.MODE_CREATE |
+ ParcelFileDescriptor.MODE_TRUNCATE);
+ factory.startAecDump(aecDumpFileDescriptor.getFd());
+ } catch(IOException e) {
+ Log.e(TAG, "Can not open aecdump file", e);
+ }
+ }
+
Log.d(TAG, "Peer connection created.");
}
private void closeInternal() {
+ if (factory != null && peerConnectionParameters.aecDump) {
+ factory.stopAecDump();
+ }
Log.d(TAG, "Closing peer connection.");
statsTimer.cancel();
if (peerConnection != null) {
@@ -489,6 +531,8 @@ public class PeerConnectionClient {
options = null;
Log.d(TAG, "Closing peer connection done.");
events.onPeerConnectionClosed();
+ PeerConnectionFactory.stopInternalTracingCapture();
+ PeerConnectionFactory.shutdownInternalTracer();
}
public boolean isHDVideo() {
@@ -623,8 +667,8 @@ public class PeerConnectionClient {
if (preferIsac) {
sdpDescription = preferCodec(sdpDescription, AUDIO_CODEC_ISAC, true);
}
- if (videoCallEnabled && preferH264) {
- sdpDescription = preferCodec(sdpDescription, VIDEO_CODEC_H264, false);
+ if (videoCallEnabled) {
+ sdpDescription = preferCodec(sdpDescription, preferredVideoCodec, false);
}
if (videoCallEnabled && peerConnectionParameters.videoStartBitrate > 0) {
sdpDescription = setStartBitrate(VIDEO_CODEC_VP8, true,
@@ -972,8 +1016,8 @@ public class PeerConnectionClient {
if (preferIsac) {
sdpDescription = preferCodec(sdpDescription, AUDIO_CODEC_ISAC, true);
}
- if (videoCallEnabled && preferH264) {
- sdpDescription = preferCodec(sdpDescription, VIDEO_CODEC_H264, false);
+ if (videoCallEnabled) {
+ sdpDescription = preferCodec(sdpDescription, preferredVideoCodec, false);
}
final SessionDescription sdp = new SessionDescription(
origSdp.type, sdpDescription);
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/SettingsActivity.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/SettingsActivity.java
index 9ad6e4d8e4..06a2d06802 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/SettingsActivity.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/SettingsActivity.java
@@ -30,15 +30,18 @@ public class SettingsActivity extends Activity
private String keyprefStartVideoBitrateValue;
private String keyPrefVideoCodec;
private String keyprefHwCodec;
+ private String keyprefCaptureToTexture;
private String keyprefStartAudioBitrateType;
private String keyprefStartAudioBitrateValue;
private String keyPrefAudioCodec;
private String keyprefNoAudioProcessing;
+ private String keyprefAecDump;
+ private String keyprefOpenSLES;
- private String keyprefCpuUsageDetection;
private String keyPrefRoomServerUrl;
private String keyPrefDisplayHud;
+ private String keyPrefTracing;
@Override
protected void onCreate(Bundle savedInstanceState) {
@@ -51,15 +54,18 @@ public class SettingsActivity extends Activity
keyprefStartVideoBitrateValue = getString(R.string.pref_startvideobitratevalue_key);
keyPrefVideoCodec = getString(R.string.pref_videocodec_key);
keyprefHwCodec = getString(R.string.pref_hwcodec_key);
+ keyprefCaptureToTexture = getString(R.string.pref_capturetotexture_key);
keyprefStartAudioBitrateType = getString(R.string.pref_startaudiobitrate_key);
keyprefStartAudioBitrateValue = getString(R.string.pref_startaudiobitratevalue_key);
keyPrefAudioCodec = getString(R.string.pref_audiocodec_key);
keyprefNoAudioProcessing = getString(R.string.pref_noaudioprocessing_key);
+ keyprefAecDump = getString(R.string.pref_aecdump_key);
+ keyprefOpenSLES = getString(R.string.pref_opensles_key);
- keyprefCpuUsageDetection = getString(R.string.pref_cpu_usage_detection_key);
keyPrefRoomServerUrl = getString(R.string.pref_room_server_url_key);
keyPrefDisplayHud = getString(R.string.pref_displayhud_key);
+ keyPrefTracing = getString(R.string.pref_tracing_key);
// Display the fragment as the main content.
settingsFragment = new SettingsFragment();
@@ -84,16 +90,19 @@ public class SettingsActivity extends Activity
setVideoBitrateEnable(sharedPreferences);
updateSummary(sharedPreferences, keyPrefVideoCodec);
updateSummaryB(sharedPreferences, keyprefHwCodec);
+ updateSummaryB(sharedPreferences, keyprefCaptureToTexture);
updateSummary(sharedPreferences, keyprefStartAudioBitrateType);
updateSummaryBitrate(sharedPreferences, keyprefStartAudioBitrateValue);
setAudioBitrateEnable(sharedPreferences);
updateSummary(sharedPreferences, keyPrefAudioCodec);
updateSummaryB(sharedPreferences, keyprefNoAudioProcessing);
+ updateSummaryB(sharedPreferences, keyprefAecDump);
+ updateSummaryB(sharedPreferences, keyprefOpenSLES);
- updateSummaryB(sharedPreferences, keyprefCpuUsageDetection);
updateSummary(sharedPreferences, keyPrefRoomServerUrl);
updateSummaryB(sharedPreferences, keyPrefDisplayHud);
+ updateSummaryB(sharedPreferences, keyPrefTracing);
}
@Override
@@ -119,10 +128,13 @@ public class SettingsActivity extends Activity
|| key.equals(keyprefStartAudioBitrateValue)) {
updateSummaryBitrate(sharedPreferences, key);
} else if (key.equals(keyprefVideoCall)
+ || key.equals(keyPrefTracing)
|| key.equals(keyprefCaptureQualitySlider)
|| key.equals(keyprefHwCodec)
+ || key.equals(keyprefCaptureToTexture)
|| key.equals(keyprefNoAudioProcessing)
- || key.equals(keyprefCpuUsageDetection)
+ || key.equals(keyprefAecDump)
+ || key.equals(keyprefOpenSLES)
|| key.equals(keyPrefDisplayHud)) {
updateSummaryB(sharedPreferences, key);
}
diff --git a/webrtc/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java b/webrtc/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java
index 5a5034b340..16a9fb3612 100644
--- a/webrtc/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java
+++ b/webrtc/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java
@@ -22,7 +22,7 @@ import org.appspot.apprtc.PeerConnectionClient.PeerConnectionParameters;
import org.appspot.apprtc.util.LooperExecutor;
import org.webrtc.EglBase;
import org.webrtc.IceCandidate;
-import org.webrtc.MediaConstraints;
+import org.webrtc.MediaCodecVideoEncoder;
import org.webrtc.PeerConnection;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.SessionDescription;
@@ -225,7 +225,7 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
PeerConnectionClient createPeerConnectionClient(
MockRenderer localRenderer, MockRenderer remoteRenderer,
- PeerConnectionParameters peerConnectionParameters, boolean decodeToTexture) {
+ PeerConnectionParameters peerConnectionParameters, boolean useTexures) {
List<PeerConnection.IceServer> iceServers =
new LinkedList<PeerConnection.IceServer>();
SignalingParameters signalingParameters = new SignalingParameters(
@@ -240,19 +240,28 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
client.setPeerConnectionFactoryOptions(options);
client.createPeerConnectionFactory(
getInstrumentation().getContext(), peerConnectionParameters, this);
- client.createPeerConnection(decodeToTexture ? eglBase.getContext() : null,
+ client.createPeerConnection(useTexures ? eglBase.getEglBaseContext() : null,
localRenderer, remoteRenderer, signalingParameters);
client.createOffer();
return client;
}
- private PeerConnectionParameters createParameters(boolean enableVideo,
- String videoCodec) {
+ private PeerConnectionParameters createParametersForAudioCall() {
PeerConnectionParameters peerConnectionParameters =
new PeerConnectionParameters(
- enableVideo, true, // videoCallEnabled, loopback.
- 0, 0, 0, 0, videoCodec, true, // video codec parameters.
- 0, "OPUS", false, true); // audio codec parameters.
+ false, true, false, // videoCallEnabled, loopback, tracing.
+ 0, 0, 0, 0, "", true, false, // video codec parameters.
+ 0, "OPUS", false, false, false); // audio codec parameters.
+ return peerConnectionParameters;
+ }
+
+ private PeerConnectionParameters createParametersForVideoCall(
+ String videoCodec, boolean captureToTexture) {
+ PeerConnectionParameters peerConnectionParameters =
+ new PeerConnectionParameters(
+ true, true, false, // videoCallEnabled, loopback, tracing.
+ 0, 0, 0, 0, videoCodec, true, captureToTexture, // video codec parameters.
+ 0, "OPUS", false, false, false); // audio codec parameters.
return peerConnectionParameters;
}
@@ -261,7 +270,7 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
signalingExecutor = new LooperExecutor();
signalingExecutor.requestStart();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
- eglBase = new EglBase();
+ eglBase = EglBase.create();
}
}
@@ -278,7 +287,8 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally");
MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
pcClient = createPeerConnectionClient(
- localRenderer, new MockRenderer(0, null), createParameters(true, VIDEO_CODEC_VP8), false);
+ localRenderer, new MockRenderer(0, null),
+ createParametersForVideoCall(VIDEO_CODEC_VP8, false), false);
// Wait for local SDP and ice candidates set events.
assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
@@ -338,46 +348,74 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
}
public void testLoopbackAudio() throws InterruptedException {
- doLoopbackTest(createParameters(false, VIDEO_CODEC_VP8), false);
+ doLoopbackTest(createParametersForAudioCall(), false);
}
public void testLoopbackVp8() throws InterruptedException {
- doLoopbackTest(createParameters(true, VIDEO_CODEC_VP8), false);
+ doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, false), false);
}
public void DISABLED_testLoopbackVp9() throws InterruptedException {
- doLoopbackTest(createParameters(true, VIDEO_CODEC_VP9), false);
+ doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9, false), false);
}
public void testLoopbackH264() throws InterruptedException {
- doLoopbackTest(createParameters(true, VIDEO_CODEC_H264), false);
+ doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, false), false);
}
public void testLoopbackVp8DecodeToTexture() throws InterruptedException {
- if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) {
- Log.i(TAG, "Decode to textures is not supported, requires EGL14.");
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
+ Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
return;
}
-
- doLoopbackTest(createParameters(true, VIDEO_CODEC_VP8), true);
+ doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, false), true);
}
public void DISABLED_testLoopbackVp9DecodeToTexture() throws InterruptedException {
- if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) {
- Log.i(TAG, "Decode to textures is not supported, requires EGL14.");
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
+ Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
return;
}
- doLoopbackTest(createParameters(true, VIDEO_CODEC_VP9), true);
+ doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9, false), true);
}
public void testLoopbackH264DecodeToTexture() throws InterruptedException {
- if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) {
- Log.i(TAG, "Decode to textures is not supported, requires EGL14.");
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
+ Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
return;
}
- doLoopbackTest(createParameters(true, VIDEO_CODEC_H264), true);
+ doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, false), true);
}
+ public void testLoopbackVp8CaptureToTexture() throws InterruptedException {
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
+ Log.i(TAG, "Encode to textures is not supported. Requires SDK version 19");
+ return;
+ }
+ // TODO(perkj): If we can always capture to textures, there is no need to check if the
+ // hardware encoder supports to encode from a texture.
+ if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
+ Log.i(TAG, "VP8 encode to textures is not supported.");
+ return;
+ }
+ doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, true), true);
+ }
+
+ public void testLoopbackH264CaptureToTexture() throws InterruptedException {
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
+ Log.i(TAG, "Encode to textures is not supported. Requires KITKAT");
+ return;
+ }
+ // TODO(perkj): If we can always capture to textures, there is no need to check if the
+ // hardware encoder supports to encode from a texture.
+ if (!MediaCodecVideoEncoder.isH264HwSupportedUsingTextures()) {
+ Log.i(TAG, "H264 encode to textures is not supported.");
+ return;
+ }
+ doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, true), true);
+ }
+
+
// Checks if default front camera can be switched to back camera and then
// again to front camera.
public void testCameraSwitch() throws InterruptedException {
@@ -388,7 +426,7 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);
pcClient = createPeerConnectionClient(
- localRenderer, remoteRenderer, createParameters(true, VIDEO_CODEC_VP8), false);
+ localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), false);
// Wait for local SDP, rename it to answer and set as remote SDP.
assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
@@ -434,7 +472,7 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);
pcClient = createPeerConnectionClient(
- localRenderer, remoteRenderer, createParameters(true, VIDEO_CODEC_VP8), false);
+ localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), false);
// Wait for local SDP, rename it to answer and set as remote SDP.
assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
diff --git a/webrtc/examples/objc/AppRTCDemo/ios/ARDAppDelegate.m b/webrtc/examples/objc/AppRTCDemo/ios/ARDAppDelegate.m
index 0f4165ebac..9568b94e77 100644
--- a/webrtc/examples/objc/AppRTCDemo/ios/ARDAppDelegate.m
+++ b/webrtc/examples/objc/AppRTCDemo/ios/ARDAppDelegate.m
@@ -29,7 +29,7 @@
ARDMainViewController *viewController = [[ARDMainViewController alloc] init];
_window.rootViewController = viewController;
-#ifndef _DEBUG
+#if defined(NDEBUG)
// In debug builds the default level is LS_INFO and in non-debug builds it is
// disabled. Continue to log to console in non-debug builds, but only
// warnings and errors.
diff --git a/webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.m b/webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.m
index 3c9e46e148..e809cb3027 100644
--- a/webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.m
+++ b/webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.m
@@ -21,15 +21,8 @@ static CGFloat const kRoomTextFieldMargin = 8;
static CGFloat const kCallControlMargin = 8;
static CGFloat const kAppLabelHeight = 20;
-@class ARDRoomTextField;
-@protocol ARDRoomTextFieldDelegate <NSObject>
-- (void)roomTextField:(ARDRoomTextField *)roomTextField
- didInputRoom:(NSString *)room;
-@end
-
// Helper view that contains a text field and a clear button.
@interface ARDRoomTextField : UIView <UITextFieldDelegate>
-@property(nonatomic, weak) id<ARDRoomTextFieldDelegate> delegate;
@property(nonatomic, readonly) NSString *roomText;
@end
@@ -38,14 +31,14 @@ static CGFloat const kAppLabelHeight = 20;
UIButton *_clearButton;
}
-@synthesize delegate = _delegate;
-
- (instancetype)initWithFrame:(CGRect)frame {
if (self = [super initWithFrame:frame]) {
_roomText = [[UITextField alloc] initWithFrame:CGRectZero];
_roomText.borderStyle = UITextBorderStyleNone;
_roomText.font = [UIFont fontWithName:@"Roboto" size:12];
_roomText.placeholder = @"Room name";
+ _roomText.autocorrectionType = UITextAutocorrectionTypeNo;
+ _roomText.autocapitalizationType = UITextAutocapitalizationTypeNone;
_roomText.delegate = self;
[_roomText addTarget:self
action:@selector(textFieldDidChange:)
@@ -96,10 +89,6 @@ static CGFloat const kAppLabelHeight = 20;
#pragma mark - UITextFieldDelegate
-- (void)textFieldDidEndEditing:(UITextField *)textField {
- [_delegate roomTextField:self didInputRoom:textField.text];
-}
-
- (BOOL)textFieldShouldReturn:(UITextField *)textField {
// There is no other control that can take focus, so manually resign focus
// when return (Join) is pressed to trigger |textFieldDidEndEditing|.
@@ -125,9 +114,6 @@ static CGFloat const kAppLabelHeight = 20;
@end
-@interface ARDMainView () <ARDRoomTextFieldDelegate>
-@end
-
@implementation ARDMainView {
UILabel *_appLabel;
ARDRoomTextField *_roomText;
@@ -151,7 +137,6 @@ static CGFloat const kAppLabelHeight = 20;
[self addSubview:_appLabel];
_roomText = [[ARDRoomTextField alloc] initWithFrame:CGRectZero];
- _roomText.delegate = self;
[self addSubview:_roomText];
UIFont *controlFont = [UIFont fontWithName:@"Roboto" size:20];
@@ -260,16 +245,6 @@ static CGFloat const kAppLabelHeight = 20;
_startCallButton.frame.size.height);
}
-#pragma mark - ARDRoomTextFieldDelegate
-
-- (void)roomTextField:(ARDRoomTextField *)roomTextField
- didInputRoom:(NSString *)room {
- [_delegate mainView:self
- didInputRoom:room
- isLoopback:NO
- isAudioOnly:_audioOnlySwitch.isOn];
-}
-
#pragma mark - Private
- (void)onStartCall:(id)sender {
diff --git a/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.h b/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.h
index 209bcd462c..378281d005 100644
--- a/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.h
+++ b/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.h
@@ -10,6 +10,7 @@
#import <UIKit/UIKit.h>
+#import "webrtc/base/objc/RTCCameraPreviewView.h"
#import "RTCEAGLVideoView.h"
#import "ARDStatsView.h"
@@ -33,7 +34,7 @@
@interface ARDVideoCallView : UIView
@property(nonatomic, readonly) UILabel *statusLabel;
-@property(nonatomic, readonly) RTCEAGLVideoView *localVideoView;
+@property(nonatomic, readonly) RTCCameraPreviewView *localVideoView;
@property(nonatomic, readonly) RTCEAGLVideoView *remoteVideoView;
@property(nonatomic, readonly) ARDStatsView *statsView;
@property(nonatomic, weak) id<ARDVideoCallViewDelegate> delegate;
diff --git a/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.m b/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.m
index 4048b84bb2..4c9c9d284e 100644
--- a/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.m
+++ b/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.m
@@ -25,7 +25,6 @@ static CGFloat const kStatusBarHeight = 20;
@implementation ARDVideoCallView {
UIButton *_cameraSwitchButton;
UIButton *_hangupButton;
- CGSize _localVideoSize;
CGSize _remoteVideoSize;
BOOL _useRearCamera;
}
@@ -42,10 +41,7 @@ static CGFloat const kStatusBarHeight = 20;
_remoteVideoView.delegate = self;
[self addSubview:_remoteVideoView];
- // TODO(tkchin): replace this with a view that renders layer from
- // AVCaptureSession.
- _localVideoView = [[RTCEAGLVideoView alloc] initWithFrame:CGRectZero];
- _localVideoView.delegate = self;
+ _localVideoView = [[RTCCameraPreviewView alloc] initWithFrame:CGRectZero];
[self addSubview:_localVideoView];
_statsView = [[ARDStatsView alloc] initWithFrame:CGRectZero];
@@ -114,22 +110,15 @@ static CGFloat const kStatusBarHeight = 20;
_remoteVideoView.frame = bounds;
}
- if (_localVideoSize.width && _localVideoSize.height > 0) {
- // Aspect fit local video view into a square box.
- CGRect localVideoFrame =
- CGRectMake(0, 0, kLocalVideoViewSize, kLocalVideoViewSize);
- localVideoFrame =
- AVMakeRectWithAspectRatioInsideRect(_localVideoSize, localVideoFrame);
-
- // Place the view in the bottom right.
- localVideoFrame.origin.x = CGRectGetMaxX(bounds)
- - localVideoFrame.size.width - kLocalVideoViewPadding;
- localVideoFrame.origin.y = CGRectGetMaxY(bounds)
- - localVideoFrame.size.height - kLocalVideoViewPadding;
- _localVideoView.frame = localVideoFrame;
- } else {
- _localVideoView.frame = bounds;
- }
+ // Aspect fit local video view into a square box.
+ CGRect localVideoFrame =
+ CGRectMake(0, 0, kLocalVideoViewSize, kLocalVideoViewSize);
+ // Place the view in the bottom right.
+ localVideoFrame.origin.x = CGRectGetMaxX(bounds)
+ - localVideoFrame.size.width - kLocalVideoViewPadding;
+ localVideoFrame.origin.y = CGRectGetMaxY(bounds)
+ - localVideoFrame.size.height - kLocalVideoViewPadding;
+ _localVideoView.frame = localVideoFrame;
// Place stats at the top.
CGSize statsSize = [_statsView sizeThatFits:bounds.size];
@@ -159,10 +148,7 @@ static CGFloat const kStatusBarHeight = 20;
#pragma mark - RTCEAGLVideoViewDelegate
- (void)videoView:(RTCEAGLVideoView*)videoView didChangeVideoSize:(CGSize)size {
- if (videoView == _localVideoView) {
- _localVideoSize = size;
- _localVideoView.hidden = CGSizeEqualToSize(CGSizeZero, _localVideoSize);
- } else if (videoView == _remoteVideoView) {
+ if (videoView == _remoteVideoView) {
_remoteVideoSize = size;
}
[self setNeedsLayout];
diff --git a/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m b/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m
index 8de6b959f0..51290a05b5 100644
--- a/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m
+++ b/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m
@@ -128,18 +128,21 @@
if (_localVideoTrack == localVideoTrack) {
return;
}
- [_localVideoTrack removeRenderer:_videoCallView.localVideoView];
_localVideoTrack = nil;
- [_videoCallView.localVideoView renderFrame:nil];
_localVideoTrack = localVideoTrack;
- [_localVideoTrack addRenderer:_videoCallView.localVideoView];
+ RTCAVFoundationVideoSource *source = nil;
+ if ([localVideoTrack.source
+ isKindOfClass:[RTCAVFoundationVideoSource class]]) {
+ source = (RTCAVFoundationVideoSource*)localVideoTrack.source;
+ }
+ _videoCallView.localVideoView.captureSession = source.captureSession;
}
- (void)setRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack {
if (_remoteVideoTrack == remoteVideoTrack) {
return;
}
- [_remoteVideoTrack removeRenderer:_videoCallView.localVideoView];
+ [_remoteVideoTrack removeRenderer:_videoCallView.remoteVideoView];
_remoteVideoTrack = nil;
[_videoCallView.remoteVideoView renderFrame:nil];
_remoteVideoTrack = remoteVideoTrack;
diff --git a/webrtc/examples/peerconnection/client/conductor.cc b/webrtc/examples/peerconnection/client/conductor.cc
index e3def9955f..883f44a77b 100644
--- a/webrtc/examples/peerconnection/client/conductor.cc
+++ b/webrtc/examples/peerconnection/client/conductor.cc
@@ -113,28 +113,22 @@ bool Conductor::CreatePeerConnection(bool dtls) {
ASSERT(peer_connection_factory_.get() != NULL);
ASSERT(peer_connection_.get() == NULL);
- webrtc::PeerConnectionInterface::IceServers servers;
+ webrtc::PeerConnectionInterface::RTCConfiguration config;
webrtc::PeerConnectionInterface::IceServer server;
server.uri = GetPeerConnectionString();
- servers.push_back(server);
+ config.servers.push_back(server);
webrtc::FakeConstraints constraints;
if (dtls) {
constraints.AddOptional(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
"true");
- }
- else
- {
+ } else {
constraints.AddOptional(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
"false");
}
- peer_connection_ =
- peer_connection_factory_->CreatePeerConnection(servers,
- &constraints,
- NULL,
- NULL,
- this);
+ peer_connection_ = peer_connection_factory_->CreatePeerConnection(
+ config, &constraints, NULL, NULL, this);
return peer_connection_.get() != NULL;
}
diff --git a/webrtc/examples/peerconnection/client/conductor.h b/webrtc/examples/peerconnection/client/conductor.h
index f5f16a3d10..e5ee170299 100644
--- a/webrtc/examples/peerconnection/client/conductor.h
+++ b/webrtc/examples/peerconnection/client/conductor.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef TALK_EXAMPLES_PEERCONNECTION_CLIENT_CONDUCTOR_H_
-#define TALK_EXAMPLES_PEERCONNECTION_CLIENT_CONDUCTOR_H_
+#ifndef WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_CONDUCTOR_H_
+#define WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_CONDUCTOR_H_
#pragma once
#include <deque>
@@ -126,4 +126,4 @@ class Conductor
std::string server_;
};
-#endif // TALK_EXAMPLES_PEERCONNECTION_CLIENT_CONDUCTOR_H_
+#endif // WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_CONDUCTOR_H_
diff --git a/webrtc/examples/peerconnection/client/defaults.cc b/webrtc/examples/peerconnection/client/defaults.cc
index 3090c15ca1..a2501c718f 100644
--- a/webrtc/examples/peerconnection/client/defaults.cc
+++ b/webrtc/examples/peerconnection/client/defaults.cc
@@ -19,6 +19,7 @@
#include <unistd.h>
#endif
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/common.h"
const char kAudioLabel[] = "audio_label";
@@ -49,10 +50,12 @@ std::string GetDefaultServerName() {
std::string GetPeerName() {
char computer_name[256];
- if (gethostname(computer_name, ARRAY_SIZE(computer_name)) != 0)
- strcpy(computer_name, "host");
std::string ret(GetEnvVarOrDefault("USERNAME", "user"));
ret += '@';
- ret += computer_name;
+ if (gethostname(computer_name, arraysize(computer_name)) == 0) {
+ ret += computer_name;
+ } else {
+ ret += "host";
+ }
return ret;
}
diff --git a/webrtc/examples/peerconnection/client/defaults.h b/webrtc/examples/peerconnection/client/defaults.h
index 7b503974e5..f4d3bf52f7 100644
--- a/webrtc/examples/peerconnection/client/defaults.h
+++ b/webrtc/examples/peerconnection/client/defaults.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef PEERCONNECTION_SAMPLES_CLIENT_DEFAULTS_H_
-#define PEERCONNECTION_SAMPLES_CLIENT_DEFAULTS_H_
+#ifndef WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_DEFAULTS_H_
+#define WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_DEFAULTS_H_
#pragma once
#include <string>
@@ -27,4 +27,4 @@ std::string GetPeerConnectionString();
std::string GetDefaultServerName();
std::string GetPeerName();
-#endif // PEERCONNECTION_SAMPLES_CLIENT_DEFAULTS_H_
+#endif // WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_DEFAULTS_H_
diff --git a/webrtc/examples/peerconnection/client/flagdefs.h b/webrtc/examples/peerconnection/client/flagdefs.h
index 0cffffb135..92e2773166 100644
--- a/webrtc/examples/peerconnection/client/flagdefs.h
+++ b/webrtc/examples/peerconnection/client/flagdefs.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef TALK_EXAMPLES_PEERCONNECTION_CLIENT_FLAGDEFS_H_
-#define TALK_EXAMPLES_PEERCONNECTION_CLIENT_FLAGDEFS_H_
+#ifndef WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_FLAGDEFS_H_
+#define WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_FLAGDEFS_H_
#pragma once
#include "webrtc/base/flags.h"
@@ -30,4 +30,4 @@ DEFINE_bool(autocall, false, "Call the first available other client on "
"the server without user intervention. Note: this flag should only be set "
"to true on one of the two clients.");
-#endif // TALK_EXAMPLES_PEERCONNECTION_CLIENT_FLAGDEFS_H_
+#endif // WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_FLAGDEFS_H_
diff --git a/webrtc/examples/peerconnection/client/linux/main.cc b/webrtc/examples/peerconnection/client/linux/main.cc
index cf88c36fbb..4db929c82e 100644
--- a/webrtc/examples/peerconnection/client/linux/main.cc
+++ b/webrtc/examples/peerconnection/client/linux/main.cc
@@ -30,7 +30,7 @@ class CustomSocketServer : public rtc::PhysicalSocketServer {
// Override so that we can also pump the GTK message loop.
virtual bool Wait(int cms, bool process_io) {
// Pump GTK events.
- // TODO: We really should move either the socket server or UI to a
+ // TODO(henrike): We really should move either the socket server or UI to a
// different thread. Alternatively we could look at merging the two loops
// by implementing a dispatcher for the socket server and/or use
// g_main_context_set_poll_func.
@@ -96,10 +96,12 @@ int main(int argc, char* argv[]) {
wnd.Destroy();
thread->set_socketserver(NULL);
- // TODO: Run the Gtk main loop to tear down the connection.
- //while (gtk_events_pending()) {
- // gtk_main_iteration();
- //}
+ // TODO(henrike): Run the Gtk main loop to tear down the connection.
+ /*
+ while (gtk_events_pending()) {
+ gtk_main_iteration();
+ }
+ */
rtc::CleanupSSL();
return 0;
}
diff --git a/webrtc/examples/peerconnection/client/linux/main_wnd.cc b/webrtc/examples/peerconnection/client/linux/main_wnd.cc
index 254fb946f9..cf98c1cac7 100644
--- a/webrtc/examples/peerconnection/client/linux/main_wnd.cc
+++ b/webrtc/examples/peerconnection/client/linux/main_wnd.cc
@@ -116,7 +116,8 @@ gboolean Redraw(gpointer data) {
wnd->OnRedraw();
return false;
}
-} // end anonymous
+
+} // namespace
//
// GtkMainWnd implementation.
@@ -174,7 +175,8 @@ void GtkMainWnd::StopLocalRenderer() {
local_renderer_.reset();
}
-void GtkMainWnd::StartRemoteRenderer(webrtc::VideoTrackInterface* remote_video) {
+void GtkMainWnd::StartRemoteRenderer(
+ webrtc::VideoTrackInterface* remote_video) {
remote_renderer_.reset(new VideoRenderer(this, remote_video));
}
@@ -488,7 +490,7 @@ void GtkMainWnd::VideoRenderer::RenderFrame(
static_cast<int>(frame->GetHeight()));
int size = width_ * height_ * 4;
- // TODO: Convert directly to RGBA
+ // TODO(henrike): Convert directly to RGBA
frame->ConvertToRgbBuffer(cricket::FOURCC_ARGB,
image_.get(),
size,
diff --git a/webrtc/examples/peerconnection/client/linux/main_wnd.h b/webrtc/examples/peerconnection/client/linux/main_wnd.h
index 1a91082768..e35d4dd8fa 100644
--- a/webrtc/examples/peerconnection/client/linux/main_wnd.h
+++ b/webrtc/examples/peerconnection/client/linux/main_wnd.h
@@ -8,8 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef PEERCONNECTION_SAMPLES_CLIENT_LINUX_MAIN_WND_H_
-#define PEERCONNECTION_SAMPLES_CLIENT_LINUX_MAIN_WND_H_
+#ifndef WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_LINUX_MAIN_WND_H_
+#define WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_LINUX_MAIN_WND_H_
+
+#include <string>
#include "webrtc/examples/peerconnection/client/main_wnd.h"
#include "webrtc/examples/peerconnection/client/peer_connection_client.h"
@@ -115,4 +117,4 @@ class GtkMainWnd : public MainWindow {
int draw_buffer_size_;
};
-#endif // PEERCONNECTION_SAMPLES_CLIENT_LINUX_MAIN_WND_H_
+#endif // WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_LINUX_MAIN_WND_H_
diff --git a/webrtc/examples/peerconnection/client/main_wnd.cc b/webrtc/examples/peerconnection/client/main_wnd.cc
index 30b12a8511..72f85b9eb2 100644
--- a/webrtc/examples/peerconnection/client/main_wnd.cc
+++ b/webrtc/examples/peerconnection/client/main_wnd.cc
@@ -13,6 +13,7 @@
#include <math.h>
#include "webrtc/examples/peerconnection/client/defaults.h"
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/common.h"
#include "webrtc/base/logging.h"
@@ -241,7 +242,7 @@ void MainWnd::OnPaint() {
// Set the map mode so that the ratio will be maintained for us.
HDC all_dc[] = { ps.hdc, dc_mem };
- for (int i = 0; i < ARRAY_SIZE(all_dc); ++i) {
+ for (int i = 0; i < arraysize(all_dc); ++i) {
SetMapMode(all_dc[i], MM_ISOTROPIC);
SetWindowExtEx(all_dc[i], width, height, NULL);
SetViewportExtEx(all_dc[i], rc.right, rc.bottom, NULL);
diff --git a/webrtc/examples/peerconnection/client/main_wnd.h b/webrtc/examples/peerconnection/client/main_wnd.h
index 9f61a568fd..ac4fd8a9b9 100644
--- a/webrtc/examples/peerconnection/client/main_wnd.h
+++ b/webrtc/examples/peerconnection/client/main_wnd.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef PEERCONNECTION_SAMPLES_CLIENT_MAIN_WND_H_
-#define PEERCONNECTION_SAMPLES_CLIENT_MAIN_WND_H_
+#ifndef WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_MAIN_WND_H_
+#define WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_MAIN_WND_H_
#pragma once
#include <map>
@@ -60,7 +60,8 @@ class MainWindow {
virtual void StartLocalRenderer(webrtc::VideoTrackInterface* local_video) = 0;
virtual void StopLocalRenderer() = 0;
- virtual void StartRemoteRenderer(webrtc::VideoTrackInterface* remote_video) = 0;
+ virtual void StartRemoteRenderer(
+ webrtc::VideoTrackInterface* remote_video) = 0;
virtual void StopRemoteRenderer() = 0;
virtual void QueueUIThreadCallback(int msg_id, void* data) = 0;
@@ -197,4 +198,4 @@ class MainWnd : public MainWindow {
};
#endif // WIN32
-#endif // PEERCONNECTION_SAMPLES_CLIENT_MAIN_WND_H_
+#endif // WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_MAIN_WND_H_
diff --git a/webrtc/examples/peerconnection/client/peer_connection_client.cc b/webrtc/examples/peerconnection/client/peer_connection_client.cc
index d49ce35060..9875115c4b 100644
--- a/webrtc/examples/peerconnection/client/peer_connection_client.cc
+++ b/webrtc/examples/peerconnection/client/peer_connection_client.cc
@@ -43,7 +43,7 @@ rtc::AsyncSocket* CreateClientSocket(int family) {
#endif
}
-}
+} // namespace
PeerConnectionClient::PeerConnectionClient()
: callback_(NULL),
@@ -114,7 +114,7 @@ void PeerConnectionClient::Connect(const std::string& server, int port,
server_address_.SetPort(port);
client_name_ = client_name;
- if (server_address_.IsUnresolved()) {
+ if (server_address_.IsUnresolvedIP()) {
state_ = RESOLVING;
resolver_ = new rtc::AsyncResolver();
resolver_->SignalDone.connect(this, &PeerConnectionClient::OnResolveResult);
diff --git a/webrtc/examples/peerconnection/client/peer_connection_client.h b/webrtc/examples/peerconnection/client/peer_connection_client.h
index 5b5787bc14..b7abfdfe18 100644
--- a/webrtc/examples/peerconnection/client/peer_connection_client.h
+++ b/webrtc/examples/peerconnection/client/peer_connection_client.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef PEERCONNECTION_SAMPLES_CLIENT_PEER_CONNECTION_CLIENT_H_
-#define PEERCONNECTION_SAMPLES_CLIENT_PEER_CONNECTION_CLIENT_H_
+#ifndef WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_PEER_CONNECTION_CLIENT_H_
+#define WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_PEER_CONNECTION_CLIENT_H_
#pragma once
#include <map>
@@ -120,4 +120,4 @@ class PeerConnectionClient : public sigslot::has_slots<>,
int my_id_;
};
-#endif // PEERCONNECTION_SAMPLES_CLIENT_PEER_CONNECTION_CLIENT_H_
+#endif // WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_PEER_CONNECTION_CLIENT_H_
diff --git a/webrtc/examples/peerconnection/server/data_socket.h b/webrtc/examples/peerconnection/server/data_socket.h
index 454ad3978a..0ef61ea6aa 100644
--- a/webrtc/examples/peerconnection/server/data_socket.h
+++ b/webrtc/examples/peerconnection/server/data_socket.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef TALK_EXAMPLES_PEERCONNECTION_SERVER_DATA_SOCKET_H_
-#define TALK_EXAMPLES_PEERCONNECTION_SERVER_DATA_SOCKET_H_
+#ifndef WEBRTC_EXAMPLES_PEERCONNECTION_SERVER_DATA_SOCKET_H_
+#define WEBRTC_EXAMPLES_PEERCONNECTION_SERVER_DATA_SOCKET_H_
#pragma once
#ifdef WIN32
@@ -150,4 +150,4 @@ class ListeningSocket : public SocketBase {
DataSocket* Accept() const;
};
-#endif // TALK_EXAMPLES_PEERCONNECTION_SERVER_DATA_SOCKET_H_
+#endif // WEBRTC_EXAMPLES_PEERCONNECTION_SERVER_DATA_SOCKET_H_
diff --git a/webrtc/examples/peerconnection/server/peer_channel.cc b/webrtc/examples/peerconnection/server/peer_channel.cc
index 150e5dec97..5e173cd460 100644
--- a/webrtc/examples/peerconnection/server/peer_channel.cc
+++ b/webrtc/examples/peerconnection/server/peer_channel.cc
@@ -19,6 +19,7 @@
#include "webrtc/examples/peerconnection/server/data_socket.h"
#include "webrtc/examples/peerconnection/server/utils.h"
#include "webrtc/base/stringutils.h"
+#include "webrtc/base/urlencode.h"
using rtc::sprintfn;
@@ -59,7 +60,7 @@ ChannelMember::ChannelMember(DataSocket* socket)
assert(socket);
assert(socket->method() == DataSocket::GET);
assert(socket->PathEquals("/sign_in"));
- name_ = socket->request_arguments(); // TODO: urldecode
+ name_ = rtc::UrlDecodeString(socket->request_arguments());
if (name_.empty())
name_ = "peer_" + int2str(id_);
else if (name_.length() > kMaxNameLength)
diff --git a/webrtc/examples/peerconnection/server/peer_channel.h b/webrtc/examples/peerconnection/server/peer_channel.h
index 263f17dfa8..6fd740d2f9 100644
--- a/webrtc/examples/peerconnection/server/peer_channel.h
+++ b/webrtc/examples/peerconnection/server/peer_channel.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef TALK_EXAMPLES_PEERCONNECTION_SERVER_PEER_CHANNEL_H_
-#define TALK_EXAMPLES_PEERCONNECTION_SERVER_PEER_CHANNEL_H_
+#ifndef WEBRTC_EXAMPLES_PEERCONNECTION_SERVER_PEER_CHANNEL_H_
+#define WEBRTC_EXAMPLES_PEERCONNECTION_SERVER_PEER_CHANNEL_H_
#pragma once
#include <time.h>
@@ -117,4 +117,4 @@ class PeerChannel {
Members members_;
};
-#endif // TALK_EXAMPLES_PEERCONNECTION_SERVER_PEER_CHANNEL_H_
+#endif // WEBRTC_EXAMPLES_PEERCONNECTION_SERVER_PEER_CHANNEL_H_
diff --git a/webrtc/examples/peerconnection/server/utils.h b/webrtc/examples/peerconnection/server/utils.h
index e70968b875..e1c8729c0b 100644
--- a/webrtc/examples/peerconnection/server/utils.h
+++ b/webrtc/examples/peerconnection/server/utils.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef TALK_EXAMPLES_PEERCONNECTION_SERVER_UTILS_H_
-#define TALK_EXAMPLES_PEERCONNECTION_SERVER_UTILS_H_
+#ifndef WEBRTC_EXAMPLES_PEERCONNECTION_SERVER_UTILS_H_
+#define WEBRTC_EXAMPLES_PEERCONNECTION_SERVER_UTILS_H_
#pragma once
#include <assert.h>
@@ -22,4 +22,4 @@
std::string int2str(int i);
std::string size_t2str(size_t i);
-#endif // TALK_EXAMPLES_PEERCONNECTION_SERVER_UTILS_H_
+#endif // WEBRTC_EXAMPLES_PEERCONNECTION_SERVER_UTILS_H_
diff --git a/webrtc/examples/stunserver/stunserver_main.cc b/webrtc/examples/stunserver/stunserver_main.cc
index 9cbd6156da..9bdf58ac3f 100644
--- a/webrtc/examples/stunserver/stunserver_main.cc
+++ b/webrtc/examples/stunserver/stunserver_main.cc
@@ -17,7 +17,7 @@
#include "webrtc/p2p/base/stunserver.h"
#include "webrtc/base/thread.h"
-using namespace cricket;
+using cricket::StunServer;
int main(int argc, char* argv[]) {
if (argc != 2) {