aboutsummaryrefslogtreecommitdiff
path: root/webrtc/examples/android
diff options
context:
space:
mode:
authorChih-hung Hsieh <chh@google.com>2015-12-01 17:07:48 +0000
committerandroid-build-merger <android-build-merger@google.com>2015-12-01 17:07:48 +0000
commita4acd9d6bc9b3b033d7d274316e75ee067df8d20 (patch)
tree672a185b294789cf991f385c3e395dd63bea9063 /webrtc/examples/android
parent3681b90ba4fe7a27232dd3e27897d5d7ed9d651c (diff)
parentfe8b4a657979b49e1701bd92f6d5814a99e0b2be (diff)
downloadwebrtc-a4acd9d6bc9b3b033d7d274316e75ee067df8d20.tar.gz
Merge changes I7bbf776e,I1b827825
am: fe8b4a6579 * commit 'fe8b4a657979b49e1701bd92f6d5814a99e0b2be': (7237 commits) WIP: Changes after merge commit 'cb3f9bd' Make the nonlinear beamformer steerable Utilize bitrate above codec max to protect video. Enable VP9 internal resize by default. Filter overlapping RTP header extensions. Make VCMEncodedFrameCallback const. MediaCodecVideoEncoder: Add number of quality resolution downscales to Encoded callback. Remove redudant encoder rate calls. Create isolate files for nonparallel tests. Register header extensions in RtpRtcpObserver to avoid log spam. Make an enum class out of NetEqDecoder, and hide the neteq_decoders_ table ACM: Move NACK functionality inside NetEq Fix chromium-style warnings in webrtc/sound/. Create a 'webrtc_nonparallel_tests' target. Update scalability structure data according to updates in the RTP payload profile. audio_coding: rename interface -> include Rewrote perform_action_on_all_files to be parallell. Update reference indices according to updates in the RTP payload profile. Disable P2PTransport...TestFailoverControlledSide on Memcheck pass clangcl compile options to ignore warnings in gflags.cc ...
Diffstat (limited to 'webrtc/examples/android')
-rw-r--r--webrtc/examples/android/media_demo/AndroidManifest.xml29
-rw-r--r--webrtc/examples/android/media_demo/README24
-rw-r--r--webrtc/examples/android/media_demo/build.xml92
-rw-r--r--webrtc/examples/android/media_demo/jni/jni_helpers.cc82
-rw-r--r--webrtc/examples/android/media_demo/jni/jni_helpers.h79
-rw-r--r--webrtc/examples/android/media_demo/jni/on_load.cc48
-rw-r--r--webrtc/examples/android/media_demo/jni/voice_engine_jni.cc423
-rw-r--r--webrtc/examples/android/media_demo/jni/voice_engine_jni.h31
-rw-r--r--webrtc/examples/android/media_demo/project.properties14
-rw-r--r--webrtc/examples/android/media_demo/res/drawable/logo.pngbin0 -> 3305 bytes
-rw-r--r--webrtc/examples/android/media_demo/res/layout/audiomenu.xml80
-rw-r--r--webrtc/examples/android/media_demo/res/layout/dropdownitems.xml17
-rw-r--r--webrtc/examples/android/media_demo/res/layout/mainmenu.xml26
-rw-r--r--webrtc/examples/android/media_demo/res/layout/settingsmenu.xml36
-rw-r--r--webrtc/examples/android/media_demo/res/menu/main_activity_actions.xml5
-rw-r--r--webrtc/examples/android/media_demo/res/values/bools.xml13
-rw-r--r--webrtc/examples/android/media_demo/res/values/integers.xml13
-rw-r--r--webrtc/examples/android/media_demo/res/values/strings.xml41
-rw-r--r--webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/AudioMenuFragment.java156
-rw-r--r--webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/CodecInst.java39
-rw-r--r--webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MainMenuFragment.java123
-rw-r--r--webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngine.java321
-rw-r--r--webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngineObserver.java15
-rw-r--r--webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MenuStateProvider.java15
-rw-r--r--webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/NativeWebRtcContextRegistry.java22
-rw-r--r--webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/RtcpStatistics.java32
-rw-r--r--webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SettingsMenuFragment.java129
-rw-r--r--webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SpinnerAdapter.java49
-rw-r--r--webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VoiceEngine.java117
-rw-r--r--webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/WebRTCDemo.java210
30 files changed, 2281 insertions, 0 deletions
diff --git a/webrtc/examples/android/media_demo/AndroidManifest.xml b/webrtc/examples/android/media_demo/AndroidManifest.xml
new file mode 100644
index 0000000000..62bf46076f
--- /dev/null
+++ b/webrtc/examples/android/media_demo/AndroidManifest.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ android:versionCode="1" package="org.webrtc.webrtcdemo" android:versionName="1.07">
+ <application android:icon="@drawable/logo"
+ android:label="@string/appName"
+ android:debuggable="true">
+ <activity android:name=".WebRTCDemo"
+ android:theme="@android:style/Theme.Holo"
+ android:label="@string/appName"
+ android:screenOrientation="landscape"
+ >
+ <intent-filter>
+ <action android:name="android.intent.action.MAIN" />
+ <category android:name="android.intent.category.LAUNCHER" />
+ <action android:name="android.intent.action.HEADSET_PLUG"/>
+ </intent-filter>
+ </activity>
+ </application>
+
+ <uses-sdk android:minSdkVersion="14" />
+ <uses-permission android:name="android.permission.CAMERA"></uses-permission>
+ <uses-feature android:name="android.hardware.camera" />
+ <uses-feature android:name="android.hardware.camera.autofocus" />
+ <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
+ <uses-permission android:name="android.permission.RECORD_AUDIO" />
+ <uses-permission android:name="android.permission.INTERNET" />
+ <uses-permission android:name="android.permission.WAKE_LOCK" />
+ <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
+</manifest> \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/README b/webrtc/examples/android/media_demo/README
new file mode 100644
index 0000000000..af71f38f46
--- /dev/null
+++ b/webrtc/examples/android/media_demo/README
@@ -0,0 +1,24 @@
+This directory contains a sample app for sending and receiving audio
+on Android. It further lets you enable and disable some call quality
+enhancements such as echo cancellation, noise suppression etc.
+
+Prerequisites:
+- Make sure gclient is checking out tools necessary to target Android: your
+ .gclient file should contain a line like:
+ target_os = ['android']
+ Make sure to re-run gclient sync after adding this to download the tools.
+- Env vars need to be set up to target Android; easiest way to do this is to run
+ (from the libjingle trunk directory):
+ . ./build/android/envsetup.sh
+ Note that this clobbers any previously-set $GYP_DEFINES so it must be done
+ before the next item.
+- Set up webrtc-related GYP variables:
+ export GYP_DEFINES="$GYP_DEFINES java_home=</path/to/JDK>"
+- Finally, run "gclient runhooks" to generate Android-targeting .ninja files.
+
+Example of building the app:
+cd <path/to/repository>/trunk
+ninja -C out/Debug WebRTCDemo
+
+It can then be installed and run on the device:
+adb install -r out/Debug/WebRTCDemo-debug.apk
diff --git a/webrtc/examples/android/media_demo/build.xml b/webrtc/examples/android/media_demo/build.xml
new file mode 100644
index 0000000000..17734886d9
--- /dev/null
+++ b/webrtc/examples/android/media_demo/build.xml
@@ -0,0 +1,92 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project name="WebRTCDemo" default="help">
+
+ <!-- The local.properties file is created and updated by the 'android' tool.
+ It contains the path to the SDK. It should *NOT* be checked into
+ Version Control Systems. -->
+ <property file="local.properties" />
+
+ <!-- The ant.properties file can be created by you. It is only edited by the
+ 'android' tool to add properties to it.
+ This is the place to change some Ant specific build properties.
+ Here are some properties you may want to change/update:
+
+ source.dir
+ The name of the source directory. Default is 'src'.
+ out.dir
+ The name of the output directory. Default is 'bin'.
+
+ For other overridable properties, look at the beginning of the rules
+ files in the SDK, at tools/ant/build.xml
+
+ Properties related to the SDK location or the project target should
+ be updated using the 'android' tool with the 'update' action.
+
+ This file is an integral part of the build system for your
+ application and should be checked into Version Control Systems.
+
+ -->
+ <property file="ant.properties" />
+
+ <!-- if sdk.dir was not set from one of the property file, then
+ get it from the ANDROID_HOME env var.
+ This must be done before we load project.properties since
+ the proguard config can use sdk.dir -->
+ <property environment="env" />
+ <condition property="sdk.dir" value="${env.ANDROID_SDK_ROOT}">
+ <isset property="env.ANDROID_SDK_ROOT" />
+ </condition>
+
+ <!-- The project.properties file is created and updated by the 'android'
+ tool, as well as ADT.
+
+ This contains project specific properties such as project target, and library
+ dependencies. Lower level build properties are stored in ant.properties
+ (or in .classpath for Eclipse projects).
+
+ This file is an integral part of the build system for your
+ application and should be checked into Version Control Systems. -->
+ <loadproperties srcFile="project.properties" />
+
+ <!-- quick check on sdk.dir -->
+ <fail
+ message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_SDK_ROOT environment variable."
+ unless="sdk.dir"
+ />
+
+ <!--
+ Import per project custom build rules if present at the root of the project.
+ This is the place to put custom intermediary targets such as:
+ -pre-build
+ -pre-compile
+ -post-compile (This is typically used for code obfuscation.
+ Compiled code location: ${out.classes.absolute.dir}
+ If this is not done in place, override ${out.dex.input.absolute.dir})
+ -post-package
+ -post-build
+ -pre-clean
+ -->
+ <import file="custom_rules.xml" optional="true" />
+
+ <!-- Import the actual build file.
+
+ To customize existing targets, there are two options:
+ - Customize only one target:
+ - copy/paste the target into this file, *before* the
+ <import> task.
+ - customize it to your needs.
+ - Customize the whole content of build.xml
+ - copy/paste the content of the rules files (minus the top node)
+ into this file, replacing the <import> task.
+ - customize to your needs.
+
+ ***********************
+ ****** IMPORTANT ******
+ ***********************
+ In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
+ in order to avoid having your file be overridden by tools such as "android update project"
+ -->
+ <!-- version-tag: 1 -->
+ <import file="${sdk.dir}/tools/ant/build.xml" />
+
+</project>
diff --git a/webrtc/examples/android/media_demo/jni/jni_helpers.cc b/webrtc/examples/android/media_demo/jni/jni_helpers.cc
new file mode 100644
index 0000000000..b0d1a7425c
--- /dev/null
+++ b/webrtc/examples/android/media_demo/jni/jni_helpers.cc
@@ -0,0 +1,82 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/examples/android/media_demo/jni/jni_helpers.h"
+
+#include <limits>
+
+#include "unicode/unistr.h"
+
+using icu::UnicodeString;
+
+jmethodID GetMethodID(JNIEnv* jni, jclass c, const std::string& name,
+ const char* signature) {
+ jmethodID m = jni->GetMethodID(c, name.c_str(), signature);
+ CHECK_JNI_EXCEPTION(jni, "error during GetMethodID");
+ return m;
+}
+
+jlong jlongFromPointer(void* ptr) {
+ CHECK(sizeof(intptr_t) <= sizeof(jlong), "Time to rethink the use of jlongs");
+ // Going through intptr_t to be obvious about the definedness of the
+ // conversion from pointer to integral type. intptr_t to jlong is a standard
+ // widening by the COMPILE_ASSERT above.
+ jlong ret = reinterpret_cast<intptr_t>(ptr);
+ CHECK(reinterpret_cast<void*>(ret) == ptr,
+ "jlong does not convert back to pointer");
+ return ret;
+}
+
+// Given a (UTF-16) jstring return a new UTF-8 native string.
+std::string JavaToStdString(JNIEnv* jni, const jstring& j_string) {
+ const jchar* jchars = jni->GetStringChars(j_string, NULL);
+ CHECK_JNI_EXCEPTION(jni, "Error during GetStringChars");
+ UnicodeString ustr(jchars, jni->GetStringLength(j_string));
+ CHECK_JNI_EXCEPTION(jni, "Error during GetStringLength");
+ jni->ReleaseStringChars(j_string, jchars);
+ CHECK_JNI_EXCEPTION(jni, "Error during ReleaseStringChars");
+ std::string ret;
+ return ustr.toUTF8String(ret);
+}
+
+ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni, const char** classes,
+ int size) {
+ for (int i = 0; i < size; ++i) {
+ LoadClass(jni, classes[i]);
+ }
+}
+ClassReferenceHolder::~ClassReferenceHolder() {
+ CHECK(classes_.empty(), "Must call FreeReferences() before dtor!");
+}
+
+void ClassReferenceHolder::FreeReferences(JNIEnv* jni) {
+ for (std::map<std::string, jclass>::const_iterator it = classes_.begin();
+ it != classes_.end(); ++it) {
+ jni->DeleteGlobalRef(it->second);
+ }
+ classes_.clear();
+}
+
+jclass ClassReferenceHolder::GetClass(const std::string& name) {
+ std::map<std::string, jclass>::iterator it = classes_.find(name);
+ CHECK(it != classes_.end(), "Could not find class");
+ return it->second;
+}
+
+void ClassReferenceHolder::LoadClass(JNIEnv* jni, const std::string& name) {
+ jclass localRef = jni->FindClass(name.c_str());
+ CHECK_JNI_EXCEPTION(jni, "Could not load class");
+ CHECK(localRef, name.c_str());
+ jclass globalRef = reinterpret_cast<jclass>(jni->NewGlobalRef(localRef));
+ CHECK_JNI_EXCEPTION(jni, "error during NewGlobalRef");
+ CHECK(globalRef, name.c_str());
+ bool inserted = classes_.insert(std::make_pair(name, globalRef)).second;
+ CHECK(inserted, "Duplicate class name");
+}
diff --git a/webrtc/examples/android/media_demo/jni/jni_helpers.h b/webrtc/examples/android/media_demo/jni/jni_helpers.h
new file mode 100644
index 0000000000..3d8ff48111
--- /dev/null
+++ b/webrtc/examples/android/media_demo/jni/jni_helpers.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_JNI_HELPERS_H_
+#define WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_JNI_HELPERS_H_
+
+// TODO(henrike): this file contains duplication with regards to
+// talk/app/webrtc/java/jni/peerconnection_jni.cc. When/if code can be shared
+// between trunk/talk and trunk/webrtc remove the duplication.
+
+#include <android/log.h>
+#include <jni.h>
+
+#include <assert.h>
+#include <map>
+#include <string>
+
+#define TAG "WEBRTC-NATIVE"
+
+// Abort the process if |x| is false, emitting |msg| to logcat.
+#define CHECK(x, msg) \
+ if (x) { \
+ } else { \
+ __android_log_print(ANDROID_LOG_ERROR, TAG, "%s:%d: %s", __FILE__, \
+ __LINE__, msg); \
+ assert(false); \
+ }
+
+// Abort the process if |jni| has a Java exception pending, emitting |msg| to
+// logcat.
+#define CHECK_JNI_EXCEPTION(jni, msg) \
+ if (0) { \
+ } else { \
+ if (jni->ExceptionCheck()) { \
+ jni->ExceptionDescribe(); \
+ jni->ExceptionClear(); \
+ CHECK(0, msg); \
+ } \
+ }
+
+// JNIEnv-helper methods that CHECK success: no Java exception thrown and found
+// object/class/method/field is non-null.
+jmethodID GetMethodID(JNIEnv* jni, jclass c, const std::string& name,
+ const char* signature);
+
+// Return a |jlong| that will automatically convert back to |ptr| when assigned
+// to a |uint64_t|
+jlong jlongFromPointer(void* ptr);
+
+// Given a (UTF-16) jstring return a new UTF-8 native string.
+std::string JavaToStdString(JNIEnv* jni, const jstring& j_string);
+
+// Android's FindClass() is trickier than usual because the app-specific
+// ClassLoader is not consulted when there is no app-specific frame on the
+// stack. Consequently, we only look up classes once in JNI_OnLoad.
+// http://developer.android.com/training/articles/perf-jni.html#faq_FindClass
+class ClassReferenceHolder {
+ public:
+ ClassReferenceHolder(JNIEnv* jni, const char** classes, int size);
+ ~ClassReferenceHolder();
+
+ void FreeReferences(JNIEnv* jni);
+
+ jclass GetClass(const std::string& name);
+
+ private:
+ void LoadClass(JNIEnv* jni, const std::string& name);
+
+ std::map<std::string, jclass> classes_;
+};
+
+#endif // WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_JNI_HELPERS_H_
diff --git a/webrtc/examples/android/media_demo/jni/on_load.cc b/webrtc/examples/android/media_demo/jni/on_load.cc
new file mode 100644
index 0000000000..5827ee8a30
--- /dev/null
+++ b/webrtc/examples/android/media_demo/jni/on_load.cc
@@ -0,0 +1,48 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include <assert.h>
+
+#include "webrtc/examples/android/media_demo/jni/jni_helpers.h"
+#include "webrtc/examples/android/media_demo/jni/voice_engine_jni.h"
+#include "webrtc/voice_engine/include/voe_base.h"
+
+// Macro for native functions that can be found by way of jni-auto discovery.
+// Note extern "C" is needed for "discovery" of native methods to work.
+#define JOWW(rettype, name) \
+ extern "C" rettype JNIEXPORT JNICALL Java_org_webrtc_webrtcdemo_##name
+
+static JavaVM* g_vm = NULL;
+
+extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM* vm, void* reserved) {
+ // Only called once.
+ CHECK(!g_vm, "OnLoad called more than once");
+ g_vm = vm;
+ return JNI_VERSION_1_4;
+}
+
+JOWW(void, NativeWebRtcContextRegistry_register)(
+ JNIEnv* jni,
+ jclass,
+ jobject context) {
+ webrtc_examples::SetVoeDeviceObjects(g_vm);
+ CHECK(webrtc::VoiceEngine::SetAndroidObjects(g_vm, context) == 0,
+ "Failed to register android objects to voice engine");
+}
+
+JOWW(void, NativeWebRtcContextRegistry_unRegister)(
+ JNIEnv* jni,
+ jclass) {
+ CHECK(webrtc::VoiceEngine::SetAndroidObjects(NULL, NULL) == 0,
+ "Failed to unregister android objects from voice engine");
+ webrtc_examples::ClearVoeDeviceObjects();
+}
diff --git a/webrtc/examples/android/media_demo/jni/voice_engine_jni.cc b/webrtc/examples/android/media_demo/jni/voice_engine_jni.cc
new file mode 100644
index 0000000000..79d6cbc4b7
--- /dev/null
+++ b/webrtc/examples/android/media_demo/jni/voice_engine_jni.cc
@@ -0,0 +1,423 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains JNI for the voice engine interfaces.
+// The native functions are found using jni's auto discovery.
+
+#include "webrtc/examples/android/media_demo/jni/voice_engine_jni.h"
+
+#include <map>
+#include <string>
+
+#include "webrtc/base/arraysize.h"
+#include "webrtc/examples/android/media_demo/jni/jni_helpers.h"
+#include "webrtc/modules/utility/interface/helpers_android.h"
+#include "webrtc/test/channel_transport/include/channel_transport.h"
+#include "webrtc/voice_engine/include/voe_audio_processing.h"
+#include "webrtc/voice_engine/include/voe_base.h"
+#include "webrtc/voice_engine/include/voe_codec.h"
+#include "webrtc/voice_engine/include/voe_file.h"
+#include "webrtc/voice_engine/include/voe_hardware.h"
+#include "webrtc/voice_engine/include/voe_network.h"
+#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
+#include "webrtc/voice_engine/include/voe_volume_control.h"
+
+// Macro for native functions that can be found by way of jni-auto discovery.
+// Note extern "C" is needed for "discovery" of native methods to work.
+#define JOWW(rettype, name) \
+ extern "C" rettype JNIEXPORT JNICALL Java_org_webrtc_webrtcdemo_##name
+
+namespace {
+
+static JavaVM* g_vm = NULL;
+static ClassReferenceHolder* g_class_reference_holder = NULL;
+
+jclass GetClass(JNIEnv* jni, const char* name) {
+ CHECK(g_class_reference_holder, "Class reference holder NULL");
+ return g_class_reference_holder->GetClass(name);
+}
+
+static const char* g_classes[] = {"org/webrtc/webrtcdemo/CodecInst"};
+
+template<typename T>
+void ReleaseSubApi(T instance) {
+ CHECK(instance->Release() >= 0, "failed to release instance")
+}
+
+class VoiceEngineData {
+ public:
+ VoiceEngineData()
+ : ve(webrtc::VoiceEngine::Create()),
+ base(webrtc::VoEBase::GetInterface(ve)),
+ codec(webrtc::VoECodec::GetInterface(ve)),
+ file(webrtc::VoEFile::GetInterface(ve)),
+ netw(webrtc::VoENetwork::GetInterface(ve)),
+ apm(webrtc::VoEAudioProcessing::GetInterface(ve)),
+ volume(webrtc::VoEVolumeControl::GetInterface(ve)),
+ hardware(webrtc::VoEHardware::GetInterface(ve)),
+ rtp(webrtc::VoERTP_RTCP::GetInterface(ve)) {
+ CHECK(ve != NULL, "Voice engine instance failed to be created");
+ CHECK(base != NULL, "Failed to acquire base interface");
+ CHECK(codec != NULL, "Failed to acquire codec interface");
+ CHECK(file != NULL, "Failed to acquire file interface");
+ CHECK(netw != NULL, "Failed to acquire netw interface");
+ CHECK(apm != NULL, "Failed to acquire apm interface");
+ CHECK(volume != NULL, "Failed to acquire volume interface");
+ CHECK(hardware != NULL, "Failed to acquire hardware interface");
+ CHECK(rtp != NULL, "Failed to acquire rtp interface");
+ }
+
+ ~VoiceEngineData() {
+ CHECK(channel_transports_.empty(),
+ "VoE transports must be deleted before terminating");
+ CHECK(base->Terminate() == 0, "VoE failed to terminate");
+ ReleaseSubApi(base);
+ ReleaseSubApi(codec);
+ ReleaseSubApi(file);
+ ReleaseSubApi(netw);
+ ReleaseSubApi(apm);
+ ReleaseSubApi(volume);
+ ReleaseSubApi(hardware);
+ ReleaseSubApi(rtp);
+ webrtc::VoiceEngine* ve_instance = ve;
+ CHECK(webrtc::VoiceEngine::Delete(ve_instance), "VoE failed to be deleted");
+ }
+
+ int CreateChannel() {
+ int channel = base->CreateChannel();
+ if (channel == -1) {
+ return -1;
+ }
+ CreateTransport(channel);
+ return channel;
+ }
+
+ int DeleteChannel(int channel) {
+ if (base->DeleteChannel(channel) != 0) {
+ return -1;
+ }
+ DeleteTransport(channel);
+ return 0;
+ }
+
+ webrtc::test::VoiceChannelTransport* GetTransport(int channel) {
+ ChannelTransports::iterator found = channel_transports_.find(channel);
+ if (found == channel_transports_.end()) {
+ return NULL;
+ }
+ return found->second;
+ }
+
+ webrtc::VoiceEngine* const ve;
+ webrtc::VoEBase* const base;
+ webrtc::VoECodec* const codec;
+ webrtc::VoEFile* const file;
+ webrtc::VoENetwork* const netw;
+ webrtc::VoEAudioProcessing* const apm;
+ webrtc::VoEVolumeControl* const volume;
+ webrtc::VoEHardware* const hardware;
+ webrtc::VoERTP_RTCP* const rtp;
+
+ private:
+ // Voice engine no longer provides a socket implementation. There is,
+ // however, a socket implementation in webrtc::test.
+ typedef std::map<int, webrtc::test::VoiceChannelTransport*>
+ ChannelTransports;
+
+ void CreateTransport(int channel) {
+ CHECK(GetTransport(channel) == NULL,
+ "Transport already created for VoE channel, inconsistent state");
+ channel_transports_[channel] =
+ new webrtc::test::VoiceChannelTransport(netw, channel);
+ }
+ void DeleteTransport(int channel) {
+ CHECK(GetTransport(channel) != NULL,
+ "VoE channel missing transport, inconsistent state");
+ delete channel_transports_[channel];
+ channel_transports_.erase(channel);
+ }
+
+ ChannelTransports channel_transports_;
+};
+
+webrtc::CodecInst* GetCodecInst(JNIEnv* jni, jobject j_codec) {
+ jclass j_codec_class = jni->GetObjectClass(j_codec);
+ jfieldID native_codec_id =
+ jni->GetFieldID(j_codec_class, "nativeCodecInst", "J");
+ jlong j_p = jni->GetLongField(j_codec, native_codec_id);
+ return reinterpret_cast<webrtc::CodecInst*>(j_p);
+}
+
+} // namespace
+
+namespace webrtc_examples {
+
+void SetVoeDeviceObjects(JavaVM* vm) {
+ CHECK(vm, "Trying to register NULL vm");
+ g_vm = vm;
+ webrtc::AttachThreadScoped ats(g_vm);
+ JNIEnv* jni = ats.env();
+ g_class_reference_holder = new ClassReferenceHolder(
+ jni, g_classes, arraysize(g_classes));
+}
+
+void ClearVoeDeviceObjects() {
+ CHECK(g_vm, "Clearing vm without it being set");
+ {
+ webrtc::AttachThreadScoped ats(g_vm);
+ g_class_reference_holder->FreeReferences(ats.env());
+ }
+ g_vm = NULL;
+ delete g_class_reference_holder;
+ g_class_reference_holder = NULL;
+}
+
+} // namespace webrtc_examples
+
+VoiceEngineData* GetVoiceEngineData(JNIEnv* jni, jobject j_voe) {
+ jclass j_voe_class = jni->GetObjectClass(j_voe);
+ jfieldID native_voe_id =
+ jni->GetFieldID(j_voe_class, "nativeVoiceEngine", "J");
+ jlong j_p = jni->GetLongField(j_voe, native_voe_id);
+ return reinterpret_cast<VoiceEngineData*>(j_p);
+}
+
+webrtc::VoiceEngine* GetVoiceEngine(JNIEnv* jni, jobject j_voe) {
+ return GetVoiceEngineData(jni, j_voe)->ve;
+}
+
+JOWW(jlong, VoiceEngine_create)(JNIEnv* jni, jclass) {
+ VoiceEngineData* voe_data = new VoiceEngineData();
+ return jlongFromPointer(voe_data);
+}
+
+JOWW(void, VoiceEngine_dispose)(JNIEnv* jni, jobject j_voe) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ delete voe_data;
+}
+
+JOWW(jint, VoiceEngine_init)(JNIEnv* jni, jobject j_voe) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->base->Init();
+}
+
+JOWW(jint, VoiceEngine_createChannel)(JNIEnv* jni, jobject j_voe) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->CreateChannel();
+}
+
+JOWW(jint, VoiceEngine_deleteChannel)(JNIEnv* jni, jobject j_voe,
+ jint channel) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->DeleteChannel(channel);
+}
+
+JOWW(jint, VoiceEngine_setLocalReceiver)(JNIEnv* jni, jobject j_voe,
+ jint channel, jint port) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ webrtc::test::VoiceChannelTransport* transport =
+ voe_data->GetTransport(channel);
+ return transport->SetLocalReceiver(port);
+}
+
+JOWW(jint, VoiceEngine_setSendDestination)(JNIEnv* jni, jobject j_voe,
+ jint channel, jint port,
+ jstring j_addr) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ std::string addr = JavaToStdString(jni, j_addr);
+ webrtc::test::VoiceChannelTransport* transport =
+ voe_data->GetTransport(channel);
+ return transport->SetSendDestination(addr.c_str(), port);
+}
+
+JOWW(jint, VoiceEngine_startListen)(JNIEnv* jni, jobject j_voe, jint channel) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->base->StartReceive(channel);
+}
+
+JOWW(jint, VoiceEngine_startPlayout)(JNIEnv* jni, jobject j_voe, jint channel) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->base->StartPlayout(channel);
+}
+
+JOWW(jint, VoiceEngine_startSend)(JNIEnv* jni, jobject j_voe, jint channel) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->base->StartSend(channel);
+}
+
+JOWW(jint, VoiceEngine_stopListen)(JNIEnv* jni, jobject j_voe, jint channel) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->base->StartReceive(channel);
+}
+
+JOWW(jint, VoiceEngine_stopPlayout)(JNIEnv* jni, jobject j_voe, jint channel) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->base->StopPlayout(channel);
+}
+
+JOWW(jint, VoiceEngine_stopSend)(JNIEnv* jni, jobject j_voe, jint channel) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->base->StopSend(channel);
+}
+
+JOWW(jint, VoiceEngine_setSpeakerVolume)(JNIEnv* jni, jobject j_voe,
+ jint level) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->volume->SetSpeakerVolume(level);
+}
+
+JOWW(jint, VoiceEngine_startPlayingFileLocally)(JNIEnv* jni, jobject j_voe,
+ jint channel,
+ jstring j_filename,
+ jboolean loop) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ std::string filename = JavaToStdString(jni, j_filename);
+ return voe_data->file->StartPlayingFileLocally(channel,
+ filename.c_str(),
+ loop);
+}
+
+JOWW(jint, VoiceEngine_stopPlayingFileLocally)(JNIEnv* jni, jobject j_voe,
+ jint channel) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->file->StopPlayingFileLocally(channel);
+}
+
+JOWW(jint, VoiceEngine_startPlayingFileAsMicrophone)(JNIEnv* jni, jobject j_voe,
+ jint channel,
+ jstring j_filename,
+ jboolean loop) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ std::string filename = JavaToStdString(jni, j_filename);
+ return voe_data->file->StartPlayingFileAsMicrophone(channel,
+ filename.c_str(),
+ loop);
+}
+
+JOWW(jint, VoiceEngine_stopPlayingFileAsMicrophone)(JNIEnv* jni, jobject j_voe,
+ jint channel) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->file->StopPlayingFileAsMicrophone(channel);
+}
+
+JOWW(jint, VoiceEngine_numOfCodecs)(JNIEnv* jni, jobject j_voe) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->codec->NumOfCodecs();
+}
+
+JOWW(jobject, VoiceEngine_getCodec)(JNIEnv* jni, jobject j_voe, jint index) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ webrtc::CodecInst* codec = new webrtc::CodecInst();
+ CHECK(voe_data->codec->GetCodec(index, *codec) == 0,
+ "getCodec must be called with valid index");
+ jclass j_codec_class = GetClass(jni, "org/webrtc/webrtcdemo/CodecInst");
+ jmethodID j_codec_ctor = GetMethodID(jni, j_codec_class, "<init>", "(J)V");
+ jobject j_codec =
+ jni->NewObject(j_codec_class, j_codec_ctor, jlongFromPointer(codec));
+ CHECK_JNI_EXCEPTION(jni, "error during NewObject");
+ return j_codec;
+}
+
+JOWW(jint, VoiceEngine_setSendCodec)(JNIEnv* jni, jobject j_voe, jint channel,
+ jobject j_codec) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ webrtc::CodecInst* inst = GetCodecInst(jni, j_codec);
+ return voe_data->codec->SetSendCodec(channel, *inst);
+}
+
+JOWW(jint, VoiceEngine_setEcStatus)(JNIEnv* jni, jobject j_voe, jboolean enable,
+ jint ec_mode) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->apm->SetEcStatus(enable,
+ static_cast<webrtc::EcModes>(ec_mode));
+}
+
+JOWW(jint, VoiceEngine_setAecmMode)(JNIEnv* jni, jobject j_voe, jint aecm_mode,
+ jboolean cng) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->apm->SetAecmMode(static_cast<webrtc::AecmModes>(aecm_mode),
+ cng);
+}
+
+JOWW(jint, VoiceEngine_setAgcStatus)(JNIEnv* jni, jobject j_voe,
+ jboolean enable, jint agc_mode) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->apm->SetAgcStatus(enable,
+ static_cast<webrtc::AgcModes>(agc_mode));
+}
+
+// Returns the native AgcConfig object associated with the Java object
+// |j_codec|.
+void GetNativeAgcConfig(JNIEnv* jni, jobject j_codec,
+ webrtc::AgcConfig* agc_config) {
+ jclass j_codec_class = jni->GetObjectClass(j_codec);
+ jfieldID dBOv_id = jni->GetFieldID(j_codec_class, "targetLevelDbOv", "I");
+ agc_config->targetLeveldBOv = jni->GetIntField(j_codec, dBOv_id);
+ jfieldID gain_id =
+ jni->GetFieldID(j_codec_class, "digitalCompressionGaindB", "I");
+ agc_config->digitalCompressionGaindB = jni->GetIntField(j_codec, gain_id);
+ jfieldID limiter_id = jni->GetFieldID(j_codec_class, "limiterEnable", "Z");
+ agc_config->limiterEnable = jni->GetBooleanField(j_codec, limiter_id);
+}
+
+JOWW(jint, VoiceEngine_setAgcConfig)(JNIEnv* jni, jobject j_voe,
+ jobject j_config) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ webrtc::AgcConfig config;
+ GetNativeAgcConfig(jni, j_config, &config);
+ return voe_data->apm->SetAgcConfig(config);
+}
+
+JOWW(jint, VoiceEngine_setNsStatus)(JNIEnv* jni, jobject j_voe, jboolean enable,
+ jint ns_mode) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->apm->SetNsStatus(enable,
+ static_cast<webrtc::NsModes>(ns_mode));
+}
+
+JOWW(jint, VoiceEngine_startDebugRecording)(JNIEnv* jni, jobject j_voe,
+ jstring j_filename) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ std::string filename = JavaToStdString(jni, j_filename);
+ return voe_data->apm->StartDebugRecording(filename.c_str());
+}
+
+JOWW(jint, VoiceEngine_stopDebugRecording)(JNIEnv* jni, jobject j_voe) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->apm->StopDebugRecording();
+}
+
+JOWW(void, CodecInst_dispose)(JNIEnv* jni, jobject j_codec) {
+ delete GetCodecInst(jni, j_codec);
+}
+
+JOWW(jint, CodecInst_plType)(JNIEnv* jni, jobject j_codec) {
+ return GetCodecInst(jni, j_codec)->pltype;
+}
+
+JOWW(jstring, CodecInst_name)(JNIEnv* jni, jobject j_codec) {
+ return jni->NewStringUTF(GetCodecInst(jni, j_codec)->plname);
+}
+
+JOWW(jint, CodecInst_plFrequency)(JNIEnv* jni, jobject j_codec) {
+ return GetCodecInst(jni, j_codec)->plfreq;
+}
+
+JOWW(jint, CodecInst_pacSize)(JNIEnv* jni, jobject j_codec) {
+ return GetCodecInst(jni, j_codec)->pacsize;
+}
+
+JOWW(jint, CodecInst_channels)(JNIEnv* jni, jobject j_codec) {
+ return GetCodecInst(jni, j_codec)->channels;
+}
+
+JOWW(jint, CodecInst_rate)(JNIEnv* jni, jobject j_codec) {
+ return GetCodecInst(jni, j_codec)->rate;
+}
diff --git a/webrtc/examples/android/media_demo/jni/voice_engine_jni.h b/webrtc/examples/android/media_demo/jni/voice_engine_jni.h
new file mode 100644
index 0000000000..57ef507653
--- /dev/null
+++ b/webrtc/examples/android/media_demo/jni/voice_engine_jni.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_VOICE_ENGINE_H_
+#define WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_VOICE_ENGINE_H_
+
+#include <jni.h>
+
+namespace webrtc {
+
+class VoiceEngine;
+
+} // namespace webrtc
+
+namespace webrtc_examples {
+
+void SetVoeDeviceObjects(JavaVM* vm);
+void ClearVoeDeviceObjects();
+
+} // namespace webrtc_examples
+
+webrtc::VoiceEngine* GetVoiceEngine(JNIEnv* jni, jobject j_voe);
+
+#endif // WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_VOICE_ENGINE_H_
diff --git a/webrtc/examples/android/media_demo/project.properties b/webrtc/examples/android/media_demo/project.properties
new file mode 100644
index 0000000000..69eb2d039b
--- /dev/null
+++ b/webrtc/examples/android/media_demo/project.properties
@@ -0,0 +1,14 @@
+# This file is automatically generated by Android Tools.
+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
+#
+# This file must be checked in Version Control Systems.
+#
+# To customize properties used by the Ant build system use,
+# "ant.properties", and override values to adapt the script to your
+# project structure.
+
+# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
+#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
+
+# Project target.
+target=android-23
diff --git a/webrtc/examples/android/media_demo/res/drawable/logo.png b/webrtc/examples/android/media_demo/res/drawable/logo.png
new file mode 100644
index 0000000000..1ff07d1102
--- /dev/null
+++ b/webrtc/examples/android/media_demo/res/drawable/logo.png
Binary files differ
diff --git a/webrtc/examples/android/media_demo/res/layout/audiomenu.xml b/webrtc/examples/android/media_demo/res/layout/audiomenu.xml
new file mode 100644
index 0000000000..f35547062a
--- /dev/null
+++ b/webrtc/examples/android/media_demo/res/layout/audiomenu.xml
@@ -0,0 +1,80 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:layout_width="fill_parent"
+ android:layout_height="fill_parent"
+ android:orientation="vertical">
+ <TextView android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:textStyle="bold"
+ android:textSize="24dip"
+ android:text="Audio Settings">
+ </TextView>
+ <TextView android:layout_height="wrap_content"
+ android:layout_gravity="bottom"
+ android:layout_width="wrap_content"
+ android:text="@string/codecType">
+ </TextView>
+ <Spinner android:id="@+id/spAudioCodecType"
+ android:layout_height="wrap_content"
+ android:layout_width="fill_parent">
+ </Spinner>
+ <LinearLayout android:layout_height="wrap_content"
+ android:layout_width="fill_parent">
+ <TextView android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/aTxPort">
+ </TextView>
+ <EditText android:id="@+id/etATxPort"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:imeOptions="actionDone"
+ android:inputType="number">
+ </EditText>
+ <TextView android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/aRxPort">
+ </TextView>
+ <EditText android:id="@+id/etARxPort"
+ android:layout_height="wrap_content"
+ android:layout_width="wrap_content"
+ android:imeOptions="actionDone"
+ android:inputType="number">
+ </EditText>
+ </LinearLayout>
+ <LinearLayout android:layout_height="wrap_content"
+ android:layout_width="fill_parent">
+ <CheckBox android:id="@+id/cbAecm"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/aecm">
+ </CheckBox>
+ <CheckBox android:id="@+id/cbNoiseSuppression"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/noiseSuppression">
+ </CheckBox>
+ <CheckBox android:id="@+id/cbAutoGainControl"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/autoGainControl">
+ </CheckBox>
+ </LinearLayout>
+ <LinearLayout android:layout_height="wrap_content"
+ android:layout_width="fill_parent">
+ <CheckBox android:id="@+id/cbSpeaker"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/speaker">
+ </CheckBox>
+ <CheckBox android:id="@+id/cbDebugRecording"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/debugRecording">
+ </CheckBox>
+ <CheckBox android:id="@+id/cbAudioRTPDump"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/rtpDump">
+ </CheckBox>
+ </LinearLayout>
+</LinearLayout> \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/layout/dropdownitems.xml b/webrtc/examples/android/media_demo/res/layout/dropdownitems.xml
new file mode 100644
index 0000000000..1014612000
--- /dev/null
+++ b/webrtc/examples/android/media_demo/res/layout/dropdownitems.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="utf-8"?>
+<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:layout_width="fill_parent"
+ android:layout_height="wrap_content"
+ android:orientation="vertical"
+ android:padding="3dip">
+ <TextView android:id="@+id/spinner_row"
+ android:layout_toRightOf="@+id/image"
+ android:padding="3dip"
+ android:layout_marginTop="2dip"
+ android:textColor="#FFF"
+ android:textStyle="bold"
+ android:text="description"
+ android:layout_marginLeft="5dip"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"/>
+</RelativeLayout> \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/layout/mainmenu.xml b/webrtc/examples/android/media_demo/res/layout/mainmenu.xml
new file mode 100644
index 0000000000..89f5399df7
--- /dev/null
+++ b/webrtc/examples/android/media_demo/res/layout/mainmenu.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ android:orientation="horizontal"
+ android:layout_width="fill_parent"
+ android:layout_height="fill_parent">
+ <LinearLayout
+ android:orientation="vertical"
+ android:layout_width="120dip"
+ android:layout_height="fill_parent">
+ <TextView android:id="@+id/tvStats"
+ android:layout_width="fill_parent"
+ android:layout_height="60dip"
+ android:textSize="6sp"
+ android:text=""/>
+ <Button android:id="@+id/btStats"
+ android:layout_width="fill_parent"
+ android:layout_height="wrap_content"
+ android:layout_gravity="bottom"
+ android:text="@string/stats"/>
+ <Button android:id="@+id/btStartStopCall"
+ android:layout_width="fill_parent"
+ android:layout_height="wrap_content"
+ android:layout_gravity="bottom"/>
+ </LinearLayout>
+</LinearLayout> \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/layout/settingsmenu.xml b/webrtc/examples/android/media_demo/res/layout/settingsmenu.xml
new file mode 100644
index 0000000000..4fba57eadc
--- /dev/null
+++ b/webrtc/examples/android/media_demo/res/layout/settingsmenu.xml
@@ -0,0 +1,36 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:layout_width="fill_parent"
+ android:layout_height="fill_parent"
+ android:layout_gravity="right"
+ android:orientation="vertical">
+ <TextView android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:textStyle="bold"
+ android:textSize="24dip"
+ android:text="@string/gSettings">
+ </TextView>
+ <LinearLayout android:orientation="horizontal"
+ android:layout_height="wrap_content"
+ android:layout_width="fill_parent">
+ <CheckBox android:id="@+id/cbAudio"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/enableAudio">
+ </CheckBox>
+ <CheckBox android:id="@+id/cbLoopback"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/loopback">
+ </CheckBox>
+ </LinearLayout>
+ <TextView android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/remoteIp">
+ </TextView>
+ <EditText android:id="@+id/etRemoteIp"
+ android:layout_height="wrap_content"
+ android:layout_width="fill_parent"
+ android:imeOptions="actionDone">
+ </EditText>
+</LinearLayout> \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/menu/main_activity_actions.xml b/webrtc/examples/android/media_demo/res/menu/main_activity_actions.xml
new file mode 100644
index 0000000000..a4921a6bbe
--- /dev/null
+++ b/webrtc/examples/android/media_demo/res/menu/main_activity_actions.xml
@@ -0,0 +1,5 @@
+<menu xmlns:android="http://schemas.android.com/apk/res/android" >
+ <item android:id="@+id/action_exit"
+ android:icon="@drawable/logo"
+ android:title="Exit"/>
+</menu> \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/values/bools.xml b/webrtc/examples/android/media_demo/res/values/bools.xml
new file mode 100644
index 0000000000..d4f3fc0e95
--- /dev/null
+++ b/webrtc/examples/android/media_demo/res/values/bools.xml
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <bool name="apm_debug_enabled_default">false</bool>
+ <bool name="audio_enabled_default">true</bool>
+ <bool name="loopback_enabled_default">true</bool>
+ <bool name="nack_enabled_default">true</bool>
+ <bool name="opengl_enabled_default">true</bool>
+ <bool name="speaker_enabled_default">false</bool>
+ <bool name="stats_enabled_default">true</bool>
+ <bool name="trace_enabled_default">true</bool>
+ <bool name="video_receive_enabled_default">true</bool>
+ <bool name="video_send_enabled_default">true</bool>
+</resources> \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/values/integers.xml b/webrtc/examples/android/media_demo/res/values/integers.xml
new file mode 100644
index 0000000000..562643b5f2
--- /dev/null
+++ b/webrtc/examples/android/media_demo/res/values/integers.xml
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <integer name="aRxPortDefault">11113</integer>
+ <integer name="aTxPortDefault">11113</integer>
+ <integer name="openGl">0</integer>
+ <integer name="surfaceView">1</integer>
+ <integer name="mediaCodec">2</integer>
+ <integer name="defaultView">0</integer>
+ <integer name="call_restart_periodicity_ms">0</integer>
+ <integer name="video_codec_default">0</integer>
+ <integer name="vRxPortDefault">11111</integer>
+ <integer name="vTxPortDefault">11111</integer>
+</resources> \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/values/strings.xml b/webrtc/examples/android/media_demo/res/values/strings.xml
new file mode 100644
index 0000000000..297d289b0c
--- /dev/null
+++ b/webrtc/examples/android/media_demo/res/values/strings.xml
@@ -0,0 +1,41 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <string name="aecm">AECM</string>
+ <string name="appName">WebRTC Engine Demo</string>
+ <string name="aRxPort">Audio Rx Port</string>
+ <string name="aTxPort">Audio Tx Port</string>
+ <string name="autoGainControl">AGC</string>
+ <string name="backCamera">SwitchToBack</string>
+ <string name="codecSize">Codec Size</string>
+ <string name="codecType">Codec Type</string>
+ <string name="debugRecording">APMRecord</string>
+ <string name="demoTitle">Video Engine Android Demo</string>
+ <string name="enableVideoReceive">Video Receive</string>
+ <string name="enableVideoSend">Video Send</string>
+ <string name="enableAudio">Audio</string>
+ <string name="error">Error</string>
+ <string name="errorCamera">Camera Error</string>
+ <string name="exit">Exit</string>
+ <string name="frontCamera">SwitchToFront</string>
+ <string name="gSettings">Global Settings</string>
+ <string name="loopback">Loopback</string>
+ <string name="loopbackIp">127.0.0.1</string>
+ <string name="nack">NACK</string>
+ <string name="noiseSuppression">NS</string>
+ <string name="remoteIp">Remote IP address</string>
+ <string name="rtpDump">rtpdump</string>
+ <string name="speaker">Speaker</string>
+ <string name="startBoth">Start Both</string>
+ <string name="startCall">StartCall</string>
+ <string name="startListen">Start Listen</string>
+ <string name="startSend">Start Send</string>
+ <string name="stats">Stats</string>
+ <string name="statsOn">Stats on</string>
+ <string name="statsOff">Stats off</string>
+ <string name="stopCall">StopCall</string>
+ <string name="surfaceView">SurfaceView</string>
+ <string name="tag">WEBRTC</string>
+ <string name="vRxPort">Video Rx Port</string>
+ <string name="vSettings">Video Settings</string>
+ <string name="vTxPort">Video Tx Port</string>
+</resources>
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/AudioMenuFragment.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/AudioMenuFragment.java
new file mode 100644
index 0000000000..94e23c2465
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/AudioMenuFragment.java
@@ -0,0 +1,156 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+import android.app.Activity;
+import android.app.Fragment;
+import android.os.Bundle;
+import android.util.Log;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.AdapterView;
+import android.widget.AdapterView.OnItemSelectedListener;
+import android.widget.CheckBox;
+import android.widget.EditText;
+import android.widget.Spinner;
+import android.widget.TextView;
+import java.lang.Integer;
+
+public class AudioMenuFragment extends Fragment {
+
+ private String TAG;
+ private MenuStateProvider stateProvider;
+
+ @Override
+ public View onCreateView(LayoutInflater inflater, ViewGroup container,
+ Bundle savedInstanceState) {
+ View v = inflater.inflate(R.layout.audiomenu, container, false);
+
+ TAG = getResources().getString(R.string.tag);
+
+ String[] audioCodecsStrings = getEngine().audioCodecsAsString();
+ Spinner spAudioCodecType = (Spinner) v.findViewById(R.id.spAudioCodecType);
+ spAudioCodecType.setAdapter(new SpinnerAdapter(getActivity(),
+ R.layout.dropdownitems,
+ audioCodecsStrings,
+ inflater));
+ spAudioCodecType.setSelection(getEngine().audioCodecIndex());
+ spAudioCodecType.setOnItemSelectedListener(new OnItemSelectedListener() {
+ public void onItemSelected(AdapterView<?> adapterView, View view,
+ int position, long id) {
+ getEngine().setAudioCodec(position);
+ }
+ public void onNothingSelected(AdapterView<?> arg0) {
+ Log.d(TAG, "No setting selected");
+ }
+ });
+
+ EditText etATxPort = (EditText) v.findViewById(R.id.etATxPort);
+ etATxPort.setText(Integer.toString(getEngine().audioTxPort()));
+ etATxPort.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View editText) {
+ EditText etATxPort = (EditText) editText;
+ getEngine()
+ .setAudioTxPort(Integer.parseInt(etATxPort.getText().toString()));
+ etATxPort.setText(Integer.toString(getEngine().audioTxPort()));
+ }
+ });
+ EditText etARxPort = (EditText) v.findViewById(R.id.etARxPort);
+ etARxPort.setText(Integer.toString(getEngine().audioRxPort()));
+ etARxPort.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View editText) {
+ EditText etARxPort = (EditText) editText;
+ getEngine()
+ .setAudioRxPort(Integer.parseInt(etARxPort.getText().toString()));
+ etARxPort.setText(Integer.toString(getEngine().audioRxPort()));
+
+ }
+ });
+
+ CheckBox cbEnableAecm = (CheckBox) v.findViewById(R.id.cbAecm);
+ cbEnableAecm.setChecked(getEngine().aecmEnabled());
+ cbEnableAecm.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ CheckBox cbEnableAecm = (CheckBox) checkBox;
+ getEngine().setEc(cbEnableAecm.isChecked());
+ cbEnableAecm.setChecked(getEngine().aecmEnabled());
+ }
+ });
+ CheckBox cbEnableNs = (CheckBox) v.findViewById(R.id.cbNoiseSuppression);
+ cbEnableNs.setChecked(getEngine().nsEnabled());
+ cbEnableNs.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ CheckBox cbEnableNs = (CheckBox) checkBox;
+ getEngine().setNs(cbEnableNs.isChecked());
+ cbEnableNs.setChecked(getEngine().nsEnabled());
+ }
+ });
+ CheckBox cbEnableAgc = (CheckBox) v.findViewById(R.id.cbAutoGainControl);
+ cbEnableAgc.setChecked(getEngine().agcEnabled());
+ cbEnableAgc.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ CheckBox cbEnableAgc = (CheckBox) checkBox;
+ getEngine().setAgc(cbEnableAgc.isChecked());
+ cbEnableAgc.setChecked(getEngine().agcEnabled());
+ }
+ });
+ CheckBox cbEnableSpeaker = (CheckBox) v.findViewById(R.id.cbSpeaker);
+ cbEnableSpeaker.setChecked(getEngine().speakerEnabled());
+ cbEnableSpeaker.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ CheckBox cbEnableSpeaker = (CheckBox) checkBox;
+ getEngine().setSpeaker(cbEnableSpeaker.isChecked());
+ cbEnableSpeaker.setChecked(getEngine().speakerEnabled());
+ }
+ });
+ CheckBox cbEnableDebugAPM =
+ (CheckBox) v.findViewById(R.id.cbDebugRecording);
+ cbEnableDebugAPM.setChecked(getEngine().apmRecord());
+ cbEnableDebugAPM.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ CheckBox cbEnableDebugAPM = (CheckBox) checkBox;
+ getEngine().setDebuging(cbEnableDebugAPM.isChecked());
+ cbEnableDebugAPM.setChecked(getEngine().apmRecord());
+ }
+ });
+ CheckBox cbEnableAudioRTPDump =
+ (CheckBox) v.findViewById(R.id.cbAudioRTPDump);
+ cbEnableAudioRTPDump.setChecked(getEngine().audioRtpDump());
+ cbEnableAudioRTPDump.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ CheckBox cbEnableAudioRTPDump = (CheckBox) checkBox;
+ getEngine().setIncomingVoeRtpDump(cbEnableAudioRTPDump.isChecked());
+ cbEnableAudioRTPDump.setChecked(getEngine().audioRtpDump());
+ }
+ });
+ return v;
+ }
+
+ @Override
+ public void onAttach(Activity activity) {
+ super.onAttach(activity);
+
+ // This makes sure that the container activity has implemented
+ // the callback interface. If not, it throws an exception.
+ try {
+ stateProvider = (MenuStateProvider) activity;
+ } catch (ClassCastException e) {
+ throw new ClassCastException(activity +
+ " must implement MenuStateProvider");
+ }
+ }
+
+ private MediaEngine getEngine() {
+ return stateProvider.getEngine();
+ }
+
+} \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/CodecInst.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/CodecInst.java
new file mode 100644
index 0000000000..133d63926b
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/CodecInst.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+public class CodecInst {
+ private final long nativeCodecInst;
+
+ // CodecInst can only be created from the native layer.
+ private CodecInst(long nativeCodecInst) {
+ this.nativeCodecInst = nativeCodecInst;
+ }
+
+ public String toString() {
+ return name() + " " +
+ "PlType: " + plType() + " " +
+ "PlFreq: " + plFrequency() + " " +
+ "Size: " + pacSize() + " " +
+ "Channels: " + channels() + " " +
+ "Rate: " + rate();
+ }
+
+ // Dispose must be called before all references to CodecInst are lost as it
+ // will free memory allocated in the native layer.
+ public native void dispose();
+ public native int plType();
+ public native String name();
+ public native int plFrequency();
+ public native int pacSize();
+ public native int channels();
+ public native int rate();
+} \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MainMenuFragment.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MainMenuFragment.java
new file mode 100644
index 0000000000..793d784043
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MainMenuFragment.java
@@ -0,0 +1,123 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+import android.app.Activity;
+import android.app.Fragment;
+import android.os.Bundle;
+import android.util.Log;
+import android.view.LayoutInflater;
+import android.view.SurfaceView;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.Button;
+import android.widget.LinearLayout;
+import android.widget.TextView;
+
+public class MainMenuFragment extends Fragment implements MediaEngineObserver {
+
+ private String TAG;
+ private MenuStateProvider stateProvider;
+
+ private Button btStartStopCall;
+ private TextView tvStats;
+
+ @Override
+ public View onCreateView(LayoutInflater inflater, ViewGroup container,
+ Bundle savedInstanceState) {
+ View v = inflater.inflate(R.layout.mainmenu, container, false);
+
+ TAG = getResources().getString(R.string.tag);
+
+ Button btStats = (Button) v.findViewById(R.id.btStats);
+ boolean stats = getResources().getBoolean(R.bool.stats_enabled_default);
+ enableStats(btStats, stats);
+ btStats.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View button) {
+ boolean turnOnStats = ((Button) button).getText().equals(
+ getResources().getString(R.string.statsOn));
+ enableStats((Button) button, turnOnStats);
+ }
+ });
+ tvStats = (TextView) v.findViewById(R.id.tvStats);
+
+ btStartStopCall = (Button) v.findViewById(R.id.btStartStopCall);
+ btStartStopCall.setText(getEngine().isRunning() ?
+ R.string.stopCall :
+ R.string.startCall);
+ btStartStopCall.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View button) {
+ toggleStart();
+ }
+ });
+ return v;
+ }
+
+ @Override
+ public void onAttach(Activity activity) {
+ super.onAttach(activity);
+
+ // This makes sure that the container activity has implemented
+ // the callback interface. If not, it throws an exception.
+ try {
+ stateProvider = (MenuStateProvider) activity;
+ } catch (ClassCastException e) {
+ throw new ClassCastException(activity +
+ " must implement MenuStateProvider");
+ }
+ }
+
+  // tvStats needs to be updated on the UI thread.
+ public void newStats(final String stats) {
+ getActivity().runOnUiThread(new Runnable() {
+ public void run() {
+ tvStats.setText(stats);
+ }
+ });
+ }
+
+ private MediaEngine getEngine() {
+ return stateProvider.getEngine();
+ }
+
+ private void enableStats(Button btStats, boolean enable) {
+ if (enable) {
+ getEngine().setObserver(this);
+ } else {
+ getEngine().setObserver(null);
+ // Clear old stats text by posting empty stats.
+ newStats("");
+ }
+    // If enable is true, stats were just turned on. This means that
+    // clicking the button again should turn stats off.
+ btStats.setText(enable ? R.string.statsOff : R.string.statsOn);
+ }
+
+
+ public void toggleStart() {
+ if (getEngine().isRunning()) {
+ stopAll();
+ } else {
+ startCall();
+ }
+ btStartStopCall.setText(getEngine().isRunning() ?
+ R.string.stopCall :
+ R.string.startCall);
+ }
+
+ public void stopAll() {
+ getEngine().stop();
+ }
+
+ private void startCall() {
+ getEngine().start();
+ }
+} \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngine.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngine.java
new file mode 100644
index 0000000000..a7036914ff
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngine.java
@@ -0,0 +1,321 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+import android.app.AlertDialog;
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.DialogInterface;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.media.AudioManager;
+import android.os.Environment;
+import android.util.Log;
+import android.view.OrientationEventListener;
+import java.io.File;
+
+public class MediaEngine {
+ private static final String LOG_DIR = "webrtc";
+
+  // Checks for and communicates failures to the user (logcat and popup).
+ private void check(boolean value, String message) {
+ if (value) {
+ return;
+ }
+ Log.e("WEBRTC-CHECK", message);
+ AlertDialog alertDialog = new AlertDialog.Builder(context).create();
+ alertDialog.setTitle("WebRTC Error");
+ alertDialog.setMessage(message);
+ alertDialog.setButton(DialogInterface.BUTTON_POSITIVE,
+ "OK",
+ new DialogInterface.OnClickListener() {
+ public void onClick(DialogInterface dialog, int which) {
+ dialog.dismiss();
+ return;
+ }
+ }
+ );
+ alertDialog.show();
+ }
+
+
+ // Shared Audio/Video members.
+ private final Context context;
+ private String remoteIp;
+ private boolean enableTrace;
+
+ // Audio
+ private VoiceEngine voe;
+ private int audioChannel;
+ private boolean audioEnabled;
+ private boolean voeRunning;
+ private int audioCodecIndex;
+ private int audioTxPort;
+ private int audioRxPort;
+
+ private boolean speakerEnabled;
+ private boolean headsetPluggedIn;
+ private boolean enableAgc;
+ private boolean enableNs;
+ private boolean enableAecm;
+
+ private BroadcastReceiver headsetListener;
+
+ private boolean audioRtpDump;
+ private boolean apmRecord;
+
+ private int inFps;
+ private int inKbps;
+ private int outFps;
+ private int outKbps;
+ private int inWidth;
+ private int inHeight;
+
+ public MediaEngine(Context context) {
+ this.context = context;
+ voe = new VoiceEngine();
+ check(voe.init() == 0, "Failed voe Init");
+ audioChannel = voe.createChannel();
+ check(audioChannel >= 0, "Failed voe CreateChannel");
+ check(audioChannel >= 0, "Failed voe CreateChannel");
+
+ check(voe.setAecmMode(VoiceEngine.AecmModes.SPEAKERPHONE, false) == 0,
+ "VoE set Aecm speakerphone mode failed");
+
+ // Set audio mode to communication
+ AudioManager audioManager =
+ ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE));
+ audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
+ // Listen to headset being plugged in/out.
+ IntentFilter receiverFilter = new IntentFilter(Intent.ACTION_HEADSET_PLUG);
+ headsetListener = new BroadcastReceiver() {
+ @Override
+ public void onReceive(Context context, Intent intent) {
+ if (intent.getAction().compareTo(Intent.ACTION_HEADSET_PLUG) == 0) {
+ headsetPluggedIn = intent.getIntExtra("state", 0) == 1;
+ updateAudioOutput();
+ }
+ }
+ };
+ context.registerReceiver(headsetListener, receiverFilter);
+ }
+
+ public void dispose() {
+ check(!voeRunning && !voeRunning, "Engines must be stopped before dispose");
+ context.unregisterReceiver(headsetListener);
+ check(voe.deleteChannel(audioChannel) == 0, "VoE delete channel failed");
+ voe.dispose();
+ }
+
+ public void start() {
+ if (audioEnabled) {
+ startVoE();
+ }
+ }
+
+ public void stop() {
+ stopVoe();
+ }
+
+ public boolean isRunning() {
+ return voeRunning;
+ }
+
+ public void setRemoteIp(String remoteIp) {
+ this.remoteIp = remoteIp;
+ UpdateSendDestination();
+ }
+
+ public String remoteIp() { return remoteIp; }
+
+ private String getDebugDirectory() {
+    // Should create a folder in /sdcard/|LOG_DIR|
+ return Environment.getExternalStorageDirectory().toString() + "/" +
+ LOG_DIR;
+ }
+
+ private boolean createDebugDirectory() {
+ File webrtc_dir = new File(getDebugDirectory());
+ if (!webrtc_dir.exists()) {
+ return webrtc_dir.mkdir();
+ }
+ return webrtc_dir.isDirectory();
+ }
+
+ public void startVoE() {
+ check(!voeRunning, "VoE already started");
+ check(voe.startListen(audioChannel) == 0, "Failed StartListen");
+ check(voe.startPlayout(audioChannel) == 0, "VoE start playout failed");
+ check(voe.startSend(audioChannel) == 0, "VoE start send failed");
+ voeRunning = true;
+ }
+
+ private void stopVoe() {
+ check(voeRunning, "VoE not started");
+ check(voe.stopSend(audioChannel) == 0, "VoE stop send failed");
+ check(voe.stopPlayout(audioChannel) == 0, "VoE stop playout failed");
+ check(voe.stopListen(audioChannel) == 0, "VoE stop listen failed");
+ voeRunning = false;
+ }
+
+ public void setAudio(boolean audioEnabled) {
+ this.audioEnabled = audioEnabled;
+ }
+
+ public boolean audioEnabled() { return audioEnabled; }
+
+ public int audioCodecIndex() { return audioCodecIndex; }
+
+ public void setAudioCodec(int codecNumber) {
+ audioCodecIndex = codecNumber;
+ CodecInst codec = voe.getCodec(codecNumber);
+ check(voe.setSendCodec(audioChannel, codec) == 0, "Failed setSendCodec");
+ codec.dispose();
+ }
+
+ public String[] audioCodecsAsString() {
+ String[] retVal = new String[voe.numOfCodecs()];
+ for (int i = 0; i < voe.numOfCodecs(); ++i) {
+ CodecInst codec = voe.getCodec(i);
+ retVal[i] = codec.toString();
+ codec.dispose();
+ }
+ return retVal;
+ }
+
+ private CodecInst[] defaultAudioCodecs() {
+ CodecInst[] retVal = new CodecInst[voe.numOfCodecs()];
+ for (int i = 0; i < voe.numOfCodecs(); ++i) {
+ retVal[i] = voe.getCodec(i);
+ }
+ return retVal;
+ }
+
+ public int getIsacIndex() {
+ CodecInst[] codecs = defaultAudioCodecs();
+ for (int i = 0; i < codecs.length; ++i) {
+ if (codecs[i].name().contains("ISAC")) {
+ return i;
+ }
+ }
+ return 0;
+ }
+
+ public void setAudioTxPort(int audioTxPort) {
+ this.audioTxPort = audioTxPort;
+ UpdateSendDestination();
+ }
+
+ public int audioTxPort() { return audioTxPort; }
+
+ public void setAudioRxPort(int audioRxPort) {
+ check(voe.setLocalReceiver(audioChannel, audioRxPort) == 0,
+ "Failed setLocalReceiver");
+ this.audioRxPort = audioRxPort;
+ }
+
+ public int audioRxPort() { return audioRxPort; }
+
+ public boolean agcEnabled() { return enableAgc; }
+
+ public void setAgc(boolean enable) {
+ enableAgc = enable;
+ VoiceEngine.AgcConfig agc_config =
+ new VoiceEngine.AgcConfig(3, 9, true);
+ check(voe.setAgcConfig(agc_config) == 0, "VoE set AGC Config failed");
+ check(voe.setAgcStatus(enableAgc, VoiceEngine.AgcModes.FIXED_DIGITAL) == 0,
+ "VoE set AGC Status failed");
+ }
+
+ public boolean nsEnabled() { return enableNs; }
+
+ public void setNs(boolean enable) {
+ enableNs = enable;
+ check(voe.setNsStatus(enableNs,
+ VoiceEngine.NsModes.MODERATE_SUPPRESSION) == 0,
+ "VoE set NS Status failed");
+ }
+
+ public boolean aecmEnabled() { return enableAecm; }
+
+ public void setEc(boolean enable) {
+ enableAecm = enable;
+ check(voe.setEcStatus(enable, VoiceEngine.EcModes.AECM) == 0,
+ "voe setEcStatus");
+ }
+
+ public boolean speakerEnabled() {
+ return speakerEnabled;
+ }
+
+ public void setSpeaker(boolean enable) {
+ speakerEnabled = enable;
+ updateAudioOutput();
+ }
+
+ // Debug helpers.
+ public boolean apmRecord() { return apmRecord; }
+
+ public boolean audioRtpDump() { return audioRtpDump; }
+
+ public void setDebuging(boolean enable) {
+ apmRecord = enable;
+ if (!enable) {
+ check(voe.stopDebugRecording() == 0, "Failed stopping debug");
+ return;
+ }
+ if (!createDebugDirectory()) {
+ check(false, "Unable to create debug directory.");
+ return;
+ }
+ String debugDirectory = getDebugDirectory();
+ check(voe.startDebugRecording(debugDirectory + String.format("/apm_%d.dat",
+ System.currentTimeMillis())) == 0,
+ "Failed starting debug");
+ }
+
+ public void setIncomingVoeRtpDump(boolean enable) {
+ audioRtpDump = enable;
+ if (!enable) {
+ check(voe.stopRtpDump(audioChannel,
+ VoiceEngine.RtpDirections.INCOMING) == 0,
+ "voe stopping rtp dump");
+ return;
+ }
+ String debugDirectory = getDebugDirectory();
+ check(voe.startRtpDump(audioChannel, debugDirectory +
+ String.format("/voe_%d.rtp", System.currentTimeMillis()),
+ VoiceEngine.RtpDirections.INCOMING) == 0,
+ "voe starting rtp dump");
+ }
+
+ private void updateAudioOutput() {
+ boolean useSpeaker = !headsetPluggedIn && speakerEnabled;
+ AudioManager audioManager =
+ ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE));
+ audioManager.setSpeakerphoneOn(useSpeaker);
+ }
+
+ private void UpdateSendDestination() {
+ if (remoteIp == null) {
+ return;
+ }
+ if (audioTxPort != 0) {
+ check(voe.setSendDestination(audioChannel, audioTxPort,
+ remoteIp) == 0, "VoE set send destination failed");
+ }
+ }
+
+ MediaEngineObserver observer;
+ public void setObserver(MediaEngineObserver observer) {
+ this.observer = observer;
+ }
+}
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngineObserver.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngineObserver.java
new file mode 100644
index 0000000000..3ea91b5e92
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngineObserver.java
@@ -0,0 +1,15 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+public interface MediaEngineObserver { // callback for receiving engine statistics as text
+ void newStats(String stats); // stats: presumably a preformatted human-readable string -- TODO confirm with the native caller
+} \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MenuStateProvider.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MenuStateProvider.java
new file mode 100644
index 0000000000..08cb508667
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MenuStateProvider.java
@@ -0,0 +1,15 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+public interface MenuStateProvider { // implemented by the host activity; supplies the shared MediaEngine to menu fragments
+ MediaEngine getEngine(); // interface methods are implicitly public, so the redundant modifier is dropped
+}
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/NativeWebRtcContextRegistry.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/NativeWebRtcContextRegistry.java
new file mode 100644
index 0000000000..3d4f00a4f6
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/NativeWebRtcContextRegistry.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+import android.content.Context;
+
+public class NativeWebRtcContextRegistry { // registers the Android Context with the native WebRTC layer
+ static {
+ System.loadLibrary("webrtcdemo-jni"); // provides the native methods declared below
+ }
+
+ public native void register(Context context);
+ public native void unRegister(); // must be paired with a prior register() -- TODO confirm in native code
+} \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/RtcpStatistics.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/RtcpStatistics.java
new file mode 100644
index 0000000000..dbe817b1af
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/RtcpStatistics.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+public class RtcpStatistics { // immutable snapshot of receive-side RTCP statistics
+ // Definition of fraction lost can be found in RFC3550.
+ // It is equivalent to taking the integer part after multiplying the loss
+ // fraction by 256.
+ public final int fractionLost;
+ public final int cumulativeLost;
+ public final int extendedMax; // presumably the RFC3550 extended highest sequence number -- TODO confirm
+ public final int jitter;
+ public final int rttMs; // round-trip time in milliseconds
+
+ // Only allowed to be created by the native layer.
+ private RtcpStatistics(int fractionLost, int cumulativeLost, int extendedMax,
+ int jitter, int rttMs) {
+ this.fractionLost = fractionLost;
+ this.cumulativeLost = cumulativeLost;
+ this.extendedMax = extendedMax;
+ this.jitter = jitter;
+ this.rttMs = rttMs;
+ }
+} \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SettingsMenuFragment.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SettingsMenuFragment.java
new file mode 100644
index 0000000000..761f96ce29
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SettingsMenuFragment.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+import android.app.Activity;
+import android.app.Fragment;
+import android.os.Bundle;
+import android.util.Log;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.CheckBox;
+import android.widget.EditText;
+import android.widget.RadioGroup;
+import android.widget.TextView;
+import java.net.InetAddress;
+import java.net.NetworkInterface;
+import java.net.SocketException;
+import java.util.Enumeration;
+
+public class SettingsMenuFragment extends Fragment // settings tab: audio toggle, loopback toggle, remote IP entry
+ implements RadioGroup.OnCheckedChangeListener {
+
+ private String TAG; // NOTE(review): stays null until onCreateView() runs; Log.e below tolerates a null tag
+ private MenuStateProvider stateProvider;
+
+ EditText etRemoteIp;
+
+ @Override
+ public View onCreateView(LayoutInflater inflater, ViewGroup container,
+ Bundle savedInstanceState) {
+ View v = inflater.inflate(R.layout.settingsmenu, container, false);
+
+ TAG = getResources().getString(R.string.tag);
+
+ CheckBox cbAudio = (CheckBox) v.findViewById(R.id.cbAudio);
+ cbAudio.setChecked(getEngine().audioEnabled()); // reflect the engine's current state
+ cbAudio.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ CheckBox cbAudio = (CheckBox) checkBox;
+ getEngine().setAudio(cbAudio.isChecked());
+ cbAudio.setChecked(getEngine().audioEnabled()); // re-read in case the engine rejected the change
+ }
+ });
+ boolean loopback =
+ getResources().getBoolean(R.bool.loopback_enabled_default);
+ CheckBox cbLoopback = (CheckBox) v.findViewById(R.id.cbLoopback);
+ cbLoopback.setChecked(loopback);
+ cbLoopback.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ loopbackChanged((CheckBox) checkBox);
+ }
+ });
+ etRemoteIp = (EditText) v.findViewById(R.id.etRemoteIp);
+ etRemoteIp.setOnFocusChangeListener(new View.OnFocusChangeListener() {
+ public void onFocusChange(View editText, boolean hasFocus) {
+ if (!hasFocus) { // commit the address only when focus leaves the field
+ getEngine().setRemoteIp(etRemoteIp.getText().toString());
+ }
+ }
+ });
+ // Has to be after remote IP as loopback changes it.
+ loopbackChanged(cbLoopback);
+ return v;
+ }
+
+ @Override
+ public void onAttach(Activity activity) {
+ super.onAttach(activity);
+
+ // This makes sure that the container activity has implemented
+ // the callback interface. If not, it throws an exception.
+ try {
+ stateProvider = (MenuStateProvider) activity;
+ } catch (ClassCastException e) {
+ throw new ClassCastException(activity +
+ " must implement MenuStateProvider");
+ }
+ }
+
+ private void loopbackChanged(CheckBox cbLoopback) { // point the engine at loopback or this device's own address
+ boolean loopback = cbLoopback.isChecked();
+ etRemoteIp.setText(loopback ? getLoopbackIPString() : getLocalIpAddress());
+ getEngine().setRemoteIp(etRemoteIp.getText().toString());
+ }
+
+ private String getLoopbackIPString() {
+ return getResources().getString(R.string.loopbackIp);
+ }
+
+ private String getLocalIpAddress() { // best-effort scan of all network interfaces; "" when none found
+ String localIp = "";
+ try {
+ for (Enumeration<NetworkInterface> en = NetworkInterface
+ .getNetworkInterfaces(); en.hasMoreElements();) {
+ NetworkInterface intf = en.nextElement();
+ for (Enumeration<InetAddress> enumIpAddr =
+ intf.getInetAddresses();
+ enumIpAddr.hasMoreElements(); ) {
+ InetAddress inetAddress = enumIpAddr.nextElement();
+ if (!inetAddress.isLoopbackAddress()) {
+ // Set the remote ip address the same as
+ // the local ip address of the last netif
+ localIp = inetAddress.getHostAddress().toString();
+ }
+ }
+ }
+ } catch (SocketException e) {
+ Log.e(TAG, "Unable to get local IP address. Not the end of the world", e);
+ }
+ return localIp;
+ }
+
+ private MediaEngine getEngine() { // engine is owned by the host activity via MenuStateProvider
+ return stateProvider.getEngine();
+ }
+
+ @Override
+ public void onCheckedChanged(RadioGroup group, int checkedId) { // intentionally empty; required by the listener interface
+ }
+} \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SpinnerAdapter.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SpinnerAdapter.java
new file mode 100644
index 0000000000..fb04a7aac3
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SpinnerAdapter.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+import android.widget.ArrayAdapter;
+import android.content.Context;
+import android.widget.TextView;
+import android.view.View;
+import android.view.ViewGroup;
+import android.view.LayoutInflater;
+
+public class SpinnerAdapter extends ArrayAdapter<String> { // adapter inflating a custom row layout for both spinner views
+ private String[] menuItems;
+ LayoutInflater inflater;
+ int textViewResourceId; // layout resource inflated for every row
+
+ public SpinnerAdapter(Context context, int textViewResourceId,
+ String[] objects, LayoutInflater inflater) {
+ super(context, textViewResourceId, objects);
+ menuItems = objects;
+ this.inflater = inflater;
+ this.textViewResourceId = textViewResourceId;
+ }
+
+ @Override public View getDropDownView(int position, View convertView,
+ ViewGroup parent) {
+ return getCustomView(position, convertView, parent);
+ }
+
+ @Override public View getView(int position, View convertView,
+ ViewGroup parent) {
+ return getCustomView(position, convertView, parent);
+ }
+
+ private View getCustomView(int position, View v, ViewGroup parent) { // NOTE(review): convertView 'v' is ignored, so rows are never recycled
+ View row = inflater.inflate(textViewResourceId, parent, false);
+ TextView label = (TextView) row.findViewById(R.id.spinner_row);
+ label.setText(menuItems[position]);
+ return row;
+ }
+} \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VoiceEngine.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VoiceEngine.java
new file mode 100644
index 0000000000..900355ad8e
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VoiceEngine.java
@@ -0,0 +1,117 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+public class VoiceEngine { // JNI wrapper for the native VoiceEngine; callers check int returns against 0 for success
+ private final long nativeVoiceEngine; // opaque native pointer from create(); presumably released by dispose() -- TODO confirm
+
+ // Keep in sync (including this comment) with
+ // webrtc/common_types.h:NsModes
+ public enum NsModes {
+ UNCHANGED, DEFAULT, CONFERENCE, LOW_SUPPRESSION,
+ MODERATE_SUPPRESSION, HIGH_SUPPRESSION, VERY_HIGH_SUPPRESSION
+ }
+
+ // Keep in sync (including this comment) with
+ // webrtc/common_types.h:AgcModes
+ public enum AgcModes {
+ UNCHANGED, DEFAULT, ADAPTIVE_ANALOG, ADAPTIVE_DIGITAL,
+ FIXED_DIGITAL
+ }
+
+ // Keep in sync (including this comment) with
+ // webrtc/common_types.h:AecmModes
+ public enum AecmModes {
+ QUIET_EARPIECE_OR_HEADSET, EARPIECE, LOUD_EARPIECE,
+ SPEAKERPHONE, LOUD_SPEAKERPHONE
+ }
+
+ // Keep in sync (including this comment) with
+ // webrtc/common_types.h:EcModes
+ public enum EcModes { UNCHANGED, DEFAULT, CONFERENCE, AEC, AECM }
+
+ // Keep in sync (including this comment) with
+ // webrtc/common_types.h:RtpDirections
+ public enum RtpDirections { INCOMING, OUTGOING }
+
+ public static class AgcConfig { // immutable AGC parameter bundle; read from the native side -- TODO confirm
+ AgcConfig(int targetLevelDbOv, int digitalCompressionGaindB,
+ boolean limiterEnable) {
+ this.targetLevelDbOv = targetLevelDbOv;
+ this.digitalCompressionGaindB = digitalCompressionGaindB;
+ this.limiterEnable = limiterEnable;
+ }
+ private final int targetLevelDbOv;
+ private final int digitalCompressionGaindB;
+ private final boolean limiterEnable;
+ }
+
+ public VoiceEngine() {
+ nativeVoiceEngine = create();
+ }
+ private static native long create();
+ public native int init();
+ public native void dispose();
+ public native int createChannel();
+ public native int deleteChannel(int channel);
+ public native int setLocalReceiver(int channel, int port);
+ public native int setSendDestination(int channel, int port, String ipaddr);
+ public native int startListen(int channel);
+ public native int startPlayout(int channel);
+ public native int startSend(int channel);
+ public native int stopListen(int channel);
+ public native int stopPlayout(int channel);
+ public native int stopSend(int channel);
+ public native int setSpeakerVolume(int volume);
+ public native int setLoudspeakerStatus(boolean enable);
+ public native int startPlayingFileLocally(
+ int channel,
+ String fileName,
+ boolean loop);
+ public native int stopPlayingFileLocally(int channel);
+ public native int startPlayingFileAsMicrophone(
+ int channel,
+ String fileName,
+ boolean loop);
+ public native int stopPlayingFileAsMicrophone(int channel);
+ public native int numOfCodecs();
+ public native CodecInst getCodec(int index);
+ public native int setSendCodec(int channel, CodecInst codec);
+ public int setEcStatus(boolean enable, EcModes mode) {
+ return setEcStatus(enable, mode.ordinal()); // enums cross JNI as ordinal(), hence the keep-in-sync comments above
+ }
+ private native int setEcStatus(boolean enable, int ec_mode);
+ public int setAecmMode(AecmModes aecm_mode, boolean cng) {
+ return setAecmMode(aecm_mode.ordinal(), cng);
+ }
+ private native int setAecmMode(int aecm_mode, boolean cng);
+ public int setAgcStatus(boolean enable, AgcModes agc_mode) {
+ return setAgcStatus(enable, agc_mode.ordinal());
+ }
+ private native int setAgcStatus(boolean enable, int agc_mode);
+ public native int setAgcConfig(AgcConfig agc_config);
+ public int setNsStatus(boolean enable, NsModes ns_mode) {
+ return setNsStatus(enable, ns_mode.ordinal());
+ }
+ private native int setNsStatus(boolean enable, int ns_mode);
+ public native int startDebugRecording(String file);
+ public native int stopDebugRecording();
+ public int startRtpDump(int channel, String file,
+ RtpDirections direction) {
+ return startRtpDump(channel, file, direction.ordinal());
+ }
+ private native int startRtpDump(int channel, String file,
+ int direction);
+ public int stopRtpDump(int channel, RtpDirections direction) {
+ return stopRtpDump(channel, direction.ordinal());
+ }
+ private native int stopRtpDump(int channel, int direction);
+} \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/WebRTCDemo.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/WebRTCDemo.java
new file mode 100644
index 0000000000..3b972cf126
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/WebRTCDemo.java
@@ -0,0 +1,210 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+import android.app.ActionBar.Tab;
+import android.app.ActionBar.TabListener;
+import android.app.ActionBar;
+import android.app.Activity;
+import android.app.Fragment;
+import android.app.FragmentTransaction;
+import android.content.pm.ActivityInfo;
+import android.media.AudioManager;
+import android.os.Bundle;
+import android.os.Handler;
+import android.view.KeyEvent;
+import android.view.Menu;
+import android.view.MenuInflater;
+import android.view.MenuItem;
+import android.view.WindowManager;
+
+public class WebRTCDemo extends Activity implements MenuStateProvider { // main activity: tabs, engine lifecycle, optional timed start/stop
+
+ // From http://developer.android.com/guide/topics/ui/actionbar.html
+ public static class TabListener<T extends Fragment> // NOTE(review): shadows the imported android.app.ActionBar.TabListener
+ implements ActionBar.TabListener {
+ private Fragment fragment;
+ private final Activity activity;
+ private final String tag;
+ private final Class<T> instance;
+ private final Bundle args;
+
+ public TabListener(Activity activity, String tag, Class<T> clz) {
+ this(activity, tag, clz, null);
+ }
+
+ public TabListener(Activity activity, String tag, Class<T> clz,
+ Bundle args) {
+ this.activity = activity;
+ this.tag = tag;
+ this.instance = clz;
+ this.args = args;
+ }
+
+ public void onTabSelected(Tab tab, FragmentTransaction ft) {
+ // Check if the fragment is already initialized
+ if (fragment == null) {
+ // If not, instantiate and add it to the activity
+ fragment = Fragment.instantiate(activity, instance.getName(), args);
+ ft.add(android.R.id.content, fragment, tag);
+ } else {
+ // If it exists, simply attach it in order to show it
+ ft.attach(fragment);
+ }
+ }
+
+ public void onTabUnselected(Tab tab, FragmentTransaction ft) {
+ if (fragment != null) {
+ // Detach the fragment, because another one is being attached
+ ft.detach(fragment);
+ }
+ }
+
+ public void onTabReselected(Tab tab, FragmentTransaction ft) {
+ // User selected the already selected tab. Do nothing.
+ }
+ }
+
+ private NativeWebRtcContextRegistry contextRegistry = null;
+ private MediaEngine mediaEngine = null;
+ private Handler handler; // non-null only after enableTimedStartStop() schedules the callback
+ public MediaEngine getEngine() { return mediaEngine; } // MenuStateProvider implementation
+
+ @Override
+ public void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+
+ // Global settings.
+ getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
+ getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
+
+ // State.
+ // Must be instantiated before MediaEngine.
+ contextRegistry = new NativeWebRtcContextRegistry();
+ contextRegistry.register(this);
+
+ // Load all settings dictated in xml.
+ mediaEngine = new MediaEngine(this);
+ mediaEngine.setRemoteIp(getResources().getString(R.string.loopbackIp));
+
+ mediaEngine.setAudio(getResources().getBoolean(
+ R.bool.audio_enabled_default));
+ mediaEngine.setAudioCodec(mediaEngine.getIsacIndex());
+ mediaEngine.setAudioRxPort(getResources().getInteger(
+ R.integer.aRxPortDefault));
+ mediaEngine.setAudioTxPort(getResources().getInteger(
+ R.integer.aTxPortDefault));
+ mediaEngine.setSpeaker(getResources().getBoolean(
+ R.bool.speaker_enabled_default));
+ mediaEngine.setDebuging(getResources().getBoolean(
+ R.bool.apm_debug_enabled_default));
+
+ // Create action bar with all tabs.
+ ActionBar actionBar = getActionBar();
+ actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_TABS);
+ actionBar.setDisplayShowTitleEnabled(false);
+
+ Tab tab = actionBar.newTab()
+ .setText("Main")
+ .setTabListener(new TabListener<MainMenuFragment>(
+ this, "main", MainMenuFragment.class));
+ actionBar.addTab(tab);
+
+ tab = actionBar.newTab()
+ .setText("Settings")
+ .setTabListener(new TabListener<SettingsMenuFragment>(
+ this, "Settings", SettingsMenuFragment.class));
+ actionBar.addTab(tab);
+
+ tab = actionBar.newTab()
+ .setText("Audio")
+ .setTabListener(new TabListener<AudioMenuFragment>(
+ this, "Audio", AudioMenuFragment.class));
+ actionBar.addTab(tab);
+
+ enableTimedStartStop();
+
+ // Hint that voice call audio stream should be used for hardware volume
+ // controls.
+ setVolumeControlStream(AudioManager.STREAM_VOICE_CALL);
+ }
+
+ @Override
+ public boolean onCreateOptionsMenu(Menu menu) {
+ MenuInflater inflater = getMenuInflater();
+ inflater.inflate(R.menu.main_activity_actions, menu);
+ return super.onCreateOptionsMenu(menu);
+ }
+
+ @Override
+ public boolean onOptionsItemSelected(MenuItem item) {
+ // Handle presses on the action bar items
+ switch (item.getItemId()) {
+ case R.id.action_exit: // stop the call before finishing so native resources are released
+ MainMenuFragment main = (MainMenuFragment)getFragmentManager()
+ .findFragmentByTag("main");
+ main.stopAll();
+ finish();
+ return true;
+ default:
+ return super.onOptionsItemSelected(item);
+ }
+ }
+
+ @Override
+ public void onDestroy() {
+ disableTimedStartStop();
+ mediaEngine.dispose();
+ contextRegistry.unRegister(); // after dispose(): engine was created against this registration
+ super.onDestroy();
+ }
+
+ @Override
+ public boolean onKeyDown(int keyCode, KeyEvent event) {
+ if (keyCode == KeyEvent.KEYCODE_BACK) {
+ // Prevent app from running in the background.
+ MainMenuFragment main = (MainMenuFragment)getFragmentManager()
+ .findFragmentByTag("main");
+ main.stopAll();
+ finish();
+ return true;
+ }
+ return super.onKeyDown(keyCode, event);
+ }
+
+ private int getCallRestartPeriodicity() {
+ return getResources().getInteger(R.integer.call_restart_periodicity_ms);
+ }
+
+ // Thread repeatedly calling start/stop.
+ void enableTimedStartStop() {
+ if (getCallRestartPeriodicity() > 0) {
+ // Periodicity == 0 <-> Disabled.
+ handler = new Handler();
+ handler.postDelayed(startOrStopCallback, getCallRestartPeriodicity());
+ }
+ }
+
+ void disableTimedStartStop() {
+ if (handler != null) { // handler is only created when the feature is enabled
+ handler.removeCallbacks(startOrStopCallback);
+ }
+ }
+
+ private Runnable startOrStopCallback = new Runnable() { // toggles the call, then reschedules itself
+ public void run() {
+ MainMenuFragment main = (MainMenuFragment)getFragmentManager()
+ .findFragmentByTag("main");
+ main.toggleStart();
+ handler.postDelayed(startOrStopCallback, getCallRestartPeriodicity());
+ }
+ };
+}