Diffstat (limited to 'webrtc/examples/android/media_demo/src')
-rw-r--r--  webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/AudioMenuFragment.java  156
-rw-r--r--  webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/CodecInst.java  39
-rw-r--r--  webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MainMenuFragment.java  123
-rw-r--r--  webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngine.java  321
-rw-r--r--  webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngineObserver.java  15
-rw-r--r--  webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MenuStateProvider.java  15
-rw-r--r--  webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/NativeWebRtcContextRegistry.java  22
-rw-r--r--  webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/RtcpStatistics.java  32
-rw-r--r--  webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SettingsMenuFragment.java  129
-rw-r--r--  webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SpinnerAdapter.java  49
-rw-r--r--  webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VoiceEngine.java  117
-rw-r--r--  webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/WebRTCDemo.java  210
12 files changed, 1228 insertions, 0 deletions
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/AudioMenuFragment.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/AudioMenuFragment.java
new file mode 100644
index 0000000000..94e23c2465
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/AudioMenuFragment.java
@@ -0,0 +1,156 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+import android.app.Activity;
+import android.app.Fragment;
+import android.os.Bundle;
+import android.util.Log;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.AdapterView;
+import android.widget.AdapterView.OnItemSelectedListener;
+import android.widget.CheckBox;
+import android.widget.EditText;
+import android.widget.Spinner;
+import android.widget.TextView;
+import java.lang.Integer;
+
+public class AudioMenuFragment extends Fragment {
+
+ private String TAG;
+ private MenuStateProvider stateProvider;
+
+ @Override
+ public View onCreateView(LayoutInflater inflater, ViewGroup container,
+ Bundle savedInstanceState) {
+ View v = inflater.inflate(R.layout.audiomenu, container, false);
+
+ TAG = getResources().getString(R.string.tag);
+
+ String[] audioCodecsStrings = getEngine().audioCodecsAsString();
+ Spinner spAudioCodecType = (Spinner) v.findViewById(R.id.spAudioCodecType);
+ spAudioCodecType.setAdapter(new SpinnerAdapter(getActivity(),
+ R.layout.dropdownitems,
+ audioCodecsStrings,
+ inflater));
+ spAudioCodecType.setSelection(getEngine().audioCodecIndex());
+ spAudioCodecType.setOnItemSelectedListener(new OnItemSelectedListener() {
+ public void onItemSelected(AdapterView<?> adapterView, View view,
+ int position, long id) {
+ getEngine().setAudioCodec(position);
+ }
+ public void onNothingSelected(AdapterView<?> arg0) {
+ Log.d(TAG, "No setting selected");
+ }
+ });
+
+ EditText etATxPort = (EditText) v.findViewById(R.id.etATxPort);
+ etATxPort.setText(Integer.toString(getEngine().audioTxPort()));
+ etATxPort.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View editText) {
+ EditText etATxPort = (EditText) editText;
+ getEngine()
+ .setAudioTxPort(Integer.parseInt(etATxPort.getText().toString()));
+ etATxPort.setText(Integer.toString(getEngine().audioTxPort()));
+ }
+ });
+ EditText etARxPort = (EditText) v.findViewById(R.id.etARxPort);
+ etARxPort.setText(Integer.toString(getEngine().audioRxPort()));
+ etARxPort.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View editText) {
+ EditText etARxPort = (EditText) editText;
+ getEngine()
+ .setAudioRxPort(Integer.parseInt(etARxPort.getText().toString()));
+ etARxPort.setText(Integer.toString(getEngine().audioRxPort()));
+
+ }
+ });
+
+ CheckBox cbEnableAecm = (CheckBox) v.findViewById(R.id.cbAecm);
+ cbEnableAecm.setChecked(getEngine().aecmEnabled());
+ cbEnableAecm.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ CheckBox cbEnableAecm = (CheckBox) checkBox;
+ getEngine().setEc(cbEnableAecm.isChecked());
+ cbEnableAecm.setChecked(getEngine().aecmEnabled());
+ }
+ });
+ CheckBox cbEnableNs = (CheckBox) v.findViewById(R.id.cbNoiseSuppression);
+ cbEnableNs.setChecked(getEngine().nsEnabled());
+ cbEnableNs.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ CheckBox cbEnableNs = (CheckBox) checkBox;
+ getEngine().setNs(cbEnableNs.isChecked());
+ cbEnableNs.setChecked(getEngine().nsEnabled());
+ }
+ });
+ CheckBox cbEnableAgc = (CheckBox) v.findViewById(R.id.cbAutoGainControl);
+ cbEnableAgc.setChecked(getEngine().agcEnabled());
+ cbEnableAgc.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ CheckBox cbEnableAgc = (CheckBox) checkBox;
+ getEngine().setAgc(cbEnableAgc.isChecked());
+ cbEnableAgc.setChecked(getEngine().agcEnabled());
+ }
+ });
+ CheckBox cbEnableSpeaker = (CheckBox) v.findViewById(R.id.cbSpeaker);
+ cbEnableSpeaker.setChecked(getEngine().speakerEnabled());
+ cbEnableSpeaker.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ CheckBox cbEnableSpeaker = (CheckBox) checkBox;
+ getEngine().setSpeaker(cbEnableSpeaker.isChecked());
+ cbEnableSpeaker.setChecked(getEngine().speakerEnabled());
+ }
+ });
+ CheckBox cbEnableDebugAPM =
+ (CheckBox) v.findViewById(R.id.cbDebugRecording);
+ cbEnableDebugAPM.setChecked(getEngine().apmRecord());
+ cbEnableDebugAPM.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ CheckBox cbEnableDebugAPM = (CheckBox) checkBox;
+ getEngine().setDebuging(cbEnableDebugAPM.isChecked());
+ cbEnableDebugAPM.setChecked(getEngine().apmRecord());
+ }
+ });
+ CheckBox cbEnableAudioRTPDump =
+ (CheckBox) v.findViewById(R.id.cbAudioRTPDump);
+ cbEnableAudioRTPDump.setChecked(getEngine().audioRtpDump());
+ cbEnableAudioRTPDump.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ CheckBox cbEnableAudioRTPDump = (CheckBox) checkBox;
+ getEngine().setIncomingVoeRtpDump(cbEnableAudioRTPDump.isChecked());
+ cbEnableAudioRTPDump.setChecked(getEngine().audioRtpDump());
+ }
+ });
+ return v;
+ }
+
+ @Override
+ public void onAttach(Activity activity) {
+ super.onAttach(activity);
+
+ // This makes sure that the container activity has implemented
+ // the callback interface. If not, it throws an exception.
+ try {
+ stateProvider = (MenuStateProvider) activity;
+ } catch (ClassCastException e) {
+ throw new ClassCastException(activity +
+ " must implement MenuStateProvider");
+ }
+ }
+
+ private MediaEngine getEngine() {
+ return stateProvider.getEngine();
+ }
+
+}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/CodecInst.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/CodecInst.java
new file mode 100644
index 0000000000..133d63926b
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/CodecInst.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+public class CodecInst {
+ private final long nativeCodecInst;
+
+ // CodecInst can only be created from the native layer.
+ private CodecInst(long nativeCodecInst) {
+ this.nativeCodecInst = nativeCodecInst;
+ }
+
+ public String toString() {
+ return name() + " " +
+ "PlType: " + plType() + " " +
+ "PlFreq: " + plFrequency() + " " +
+ "Size: " + pacSize() + " " +
+ "Channels: " + channels() + " " +
+ "Rate: " + rate();
+ }
+
+ // Dispose must be called before all references to a CodecInst are lost, as
+ // it frees memory allocated in the native layer.
+ public native void dispose();
+ public native int plType();
+ public native String name();
+ public native int plFrequency();
+ public native int pacSize();
+ public native int channels();
+ public native int rate();
+}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MainMenuFragment.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MainMenuFragment.java
new file mode 100644
index 0000000000..793d784043
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MainMenuFragment.java
@@ -0,0 +1,123 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+import android.app.Activity;
+import android.app.Fragment;
+import android.os.Bundle;
+import android.util.Log;
+import android.view.LayoutInflater;
+import android.view.SurfaceView;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.Button;
+import android.widget.LinearLayout;
+import android.widget.TextView;
+
+public class MainMenuFragment extends Fragment implements MediaEngineObserver {
+
+ private String TAG;
+ private MenuStateProvider stateProvider;
+
+ private Button btStartStopCall;
+ private TextView tvStats;
+
+ @Override
+ public View onCreateView(LayoutInflater inflater, ViewGroup container,
+ Bundle savedInstanceState) {
+ View v = inflater.inflate(R.layout.mainmenu, container, false);
+
+ TAG = getResources().getString(R.string.tag);
+
+ Button btStats = (Button) v.findViewById(R.id.btStats);
+ boolean stats = getResources().getBoolean(R.bool.stats_enabled_default);
+ enableStats(btStats, stats);
+ btStats.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View button) {
+ boolean turnOnStats = ((Button) button).getText().equals(
+ getResources().getString(R.string.statsOn));
+ enableStats((Button) button, turnOnStats);
+ }
+ });
+ tvStats = (TextView) v.findViewById(R.id.tvStats);
+
+ btStartStopCall = (Button) v.findViewById(R.id.btStartStopCall);
+ btStartStopCall.setText(getEngine().isRunning() ?
+ R.string.stopCall :
+ R.string.startCall);
+ btStartStopCall.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View button) {
+ toggleStart();
+ }
+ });
+ return v;
+ }
+
+ @Override
+ public void onAttach(Activity activity) {
+ super.onAttach(activity);
+
+ // This makes sure that the container activity has implemented
+ // the callback interface. If not, it throws an exception.
+ try {
+ stateProvider = (MenuStateProvider) activity;
+ } catch (ClassCastException e) {
+ throw new ClassCastException(activity +
+ " must implement MenuStateProvider");
+ }
+ }
+
+ // tvStats needs to be updated on the UI thread.
+ public void newStats(final String stats) {
+ getActivity().runOnUiThread(new Runnable() {
+ public void run() {
+ tvStats.setText(stats);
+ }
+ });
+ }
+
+ private MediaEngine getEngine() {
+ return stateProvider.getEngine();
+ }
+
+ private void enableStats(Button btStats, boolean enable) {
+ if (enable) {
+ getEngine().setObserver(this);
+ } else {
+ getEngine().setObserver(null);
+ // Clear old stats text by posting empty stats.
+ newStats("");
+ }
+ // If enable is true, stats were just turned on, which means that clicking
+ // the button again should turn them off.
+ btStats.setText(enable ? R.string.statsOff : R.string.statsOn);
+ }
+
+
+ public void toggleStart() {
+ if (getEngine().isRunning()) {
+ stopAll();
+ } else {
+ startCall();
+ }
+ btStartStopCall.setText(getEngine().isRunning() ?
+ R.string.stopCall :
+ R.string.startCall);
+ }
+
+ public void stopAll() {
+ getEngine().stop();
+ }
+
+ private void startCall() {
+ getEngine().start();
+ }
+}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngine.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngine.java
new file mode 100644
index 0000000000..a7036914ff
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngine.java
@@ -0,0 +1,321 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+import android.app.AlertDialog;
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.DialogInterface;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.media.AudioManager;
+import android.os.Environment;
+import android.util.Log;
+import android.view.OrientationEventListener;
+import java.io.File;
+
+public class MediaEngine {
+ private static final String LOG_DIR = "webrtc";
+
+ // Checks for and communicates failures to the user (logcat and popup).
+ private void check(boolean value, String message) {
+ if (value) {
+ return;
+ }
+ Log.e("WEBRTC-CHECK", message);
+ AlertDialog alertDialog = new AlertDialog.Builder(context).create();
+ alertDialog.setTitle("WebRTC Error");
+ alertDialog.setMessage(message);
+ alertDialog.setButton(DialogInterface.BUTTON_POSITIVE,
+ "OK",
+ new DialogInterface.OnClickListener() {
+ public void onClick(DialogInterface dialog, int which) {
+ dialog.dismiss();
+ return;
+ }
+ }
+ );
+ alertDialog.show();
+ }
+
+
+ // Shared Audio/Video members.
+ private final Context context;
+ private String remoteIp;
+ private boolean enableTrace;
+
+ // Audio
+ private VoiceEngine voe;
+ private int audioChannel;
+ private boolean audioEnabled;
+ private boolean voeRunning;
+ private int audioCodecIndex;
+ private int audioTxPort;
+ private int audioRxPort;
+
+ private boolean speakerEnabled;
+ private boolean headsetPluggedIn;
+ private boolean enableAgc;
+ private boolean enableNs;
+ private boolean enableAecm;
+
+ private BroadcastReceiver headsetListener;
+
+ private boolean audioRtpDump;
+ private boolean apmRecord;
+
+ private int inFps;
+ private int inKbps;
+ private int outFps;
+ private int outKbps;
+ private int inWidth;
+ private int inHeight;
+
+ public MediaEngine(Context context) {
+ this.context = context;
+ voe = new VoiceEngine();
+ check(voe.init() == 0, "Failed voe Init");
+ audioChannel = voe.createChannel();
+ check(audioChannel >= 0, "Failed voe CreateChannel");
+
+ check(voe.setAecmMode(VoiceEngine.AecmModes.SPEAKERPHONE, false) == 0,
+ "VoE set Aecm speakerphone mode failed");
+
+ // Set audio mode to communication
+ AudioManager audioManager =
+ ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE));
+ audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
+ // Listen to headset being plugged in/out.
+ IntentFilter receiverFilter = new IntentFilter(Intent.ACTION_HEADSET_PLUG);
+ headsetListener = new BroadcastReceiver() {
+ @Override
+ public void onReceive(Context context, Intent intent) {
+ if (intent.getAction().compareTo(Intent.ACTION_HEADSET_PLUG) == 0) {
+ headsetPluggedIn = intent.getIntExtra("state", 0) == 1;
+ updateAudioOutput();
+ }
+ }
+ };
+ context.registerReceiver(headsetListener, receiverFilter);
+ }
+
+ public void dispose() {
+ check(!voeRunning, "Engines must be stopped before dispose");
+ context.unregisterReceiver(headsetListener);
+ check(voe.deleteChannel(audioChannel) == 0, "VoE delete channel failed");
+ voe.dispose();
+ }
+
+ public void start() {
+ if (audioEnabled) {
+ startVoE();
+ }
+ }
+
+ public void stop() {
+ stopVoe();
+ }
+
+ public boolean isRunning() {
+ return voeRunning;
+ }
+
+ public void setRemoteIp(String remoteIp) {
+ this.remoteIp = remoteIp;
+ UpdateSendDestination();
+ }
+
+ public String remoteIp() { return remoteIp; }
+
+ private String getDebugDirectory() {
+ // Returns the path of the debug folder in /sdcard/|LOG_DIR|.
+ return Environment.getExternalStorageDirectory().toString() + "/" +
+ LOG_DIR;
+ }
+
+ private boolean createDebugDirectory() {
+ File webrtc_dir = new File(getDebugDirectory());
+ if (!webrtc_dir.exists()) {
+ return webrtc_dir.mkdir();
+ }
+ return webrtc_dir.isDirectory();
+ }
+
+ public void startVoE() {
+ check(!voeRunning, "VoE already started");
+ check(voe.startListen(audioChannel) == 0, "Failed StartListen");
+ check(voe.startPlayout(audioChannel) == 0, "VoE start playout failed");
+ check(voe.startSend(audioChannel) == 0, "VoE start send failed");
+ voeRunning = true;
+ }
+
+ private void stopVoe() {
+ check(voeRunning, "VoE not started");
+ check(voe.stopSend(audioChannel) == 0, "VoE stop send failed");
+ check(voe.stopPlayout(audioChannel) == 0, "VoE stop playout failed");
+ check(voe.stopListen(audioChannel) == 0, "VoE stop listen failed");
+ voeRunning = false;
+ }
+
+ public void setAudio(boolean audioEnabled) {
+ this.audioEnabled = audioEnabled;
+ }
+
+ public boolean audioEnabled() { return audioEnabled; }
+
+ public int audioCodecIndex() { return audioCodecIndex; }
+
+ public void setAudioCodec(int codecNumber) {
+ audioCodecIndex = codecNumber;
+ CodecInst codec = voe.getCodec(codecNumber);
+ check(voe.setSendCodec(audioChannel, codec) == 0, "Failed setSendCodec");
+ codec.dispose();
+ }
+
+ public String[] audioCodecsAsString() {
+ String[] retVal = new String[voe.numOfCodecs()];
+ for (int i = 0; i < voe.numOfCodecs(); ++i) {
+ CodecInst codec = voe.getCodec(i);
+ retVal[i] = codec.toString();
+ codec.dispose();
+ }
+ return retVal;
+ }
+
+ private CodecInst[] defaultAudioCodecs() {
+ CodecInst[] retVal = new CodecInst[voe.numOfCodecs()];
+ for (int i = 0; i < voe.numOfCodecs(); ++i) {
+ retVal[i] = voe.getCodec(i);
+ }
+ return retVal;
+ }
+
+ public int getIsacIndex() {
+ CodecInst[] codecs = defaultAudioCodecs();
+ for (int i = 0; i < codecs.length; ++i) {
+ if (codecs[i].name().contains("ISAC")) {
+ return i;
+ }
+ }
+ return 0;
+ }
+
+ public void setAudioTxPort(int audioTxPort) {
+ this.audioTxPort = audioTxPort;
+ UpdateSendDestination();
+ }
+
+ public int audioTxPort() { return audioTxPort; }
+
+ public void setAudioRxPort(int audioRxPort) {
+ check(voe.setLocalReceiver(audioChannel, audioRxPort) == 0,
+ "Failed setLocalReceiver");
+ this.audioRxPort = audioRxPort;
+ }
+
+ public int audioRxPort() { return audioRxPort; }
+
+ public boolean agcEnabled() { return enableAgc; }
+
+ public void setAgc(boolean enable) {
+ enableAgc = enable;
+ VoiceEngine.AgcConfig agc_config =
+ new VoiceEngine.AgcConfig(3, 9, true);
+ check(voe.setAgcConfig(agc_config) == 0, "VoE set AGC Config failed");
+ check(voe.setAgcStatus(enableAgc, VoiceEngine.AgcModes.FIXED_DIGITAL) == 0,
+ "VoE set AGC Status failed");
+ }
+
+ public boolean nsEnabled() { return enableNs; }
+
+ public void setNs(boolean enable) {
+ enableNs = enable;
+ check(voe.setNsStatus(enableNs,
+ VoiceEngine.NsModes.MODERATE_SUPPRESSION) == 0,
+ "VoE set NS Status failed");
+ }
+
+ public boolean aecmEnabled() { return enableAecm; }
+
+ public void setEc(boolean enable) {
+ enableAecm = enable;
+ check(voe.setEcStatus(enable, VoiceEngine.EcModes.AECM) == 0,
+ "voe setEcStatus");
+ }
+
+ public boolean speakerEnabled() {
+ return speakerEnabled;
+ }
+
+ public void setSpeaker(boolean enable) {
+ speakerEnabled = enable;
+ updateAudioOutput();
+ }
+
+ // Debug helpers.
+ public boolean apmRecord() { return apmRecord; }
+
+ public boolean audioRtpDump() { return audioRtpDump; }
+
+ public void setDebuging(boolean enable) {
+ apmRecord = enable;
+ if (!enable) {
+ check(voe.stopDebugRecording() == 0, "Failed stopping debug");
+ return;
+ }
+ if (!createDebugDirectory()) {
+ check(false, "Unable to create debug directory.");
+ return;
+ }
+ String debugDirectory = getDebugDirectory();
+ check(voe.startDebugRecording(debugDirectory + String.format("/apm_%d.dat",
+ System.currentTimeMillis())) == 0,
+ "Failed starting debug");
+ }
+
+ public void setIncomingVoeRtpDump(boolean enable) {
+ audioRtpDump = enable;
+ if (!enable) {
+ check(voe.stopRtpDump(audioChannel,
+ VoiceEngine.RtpDirections.INCOMING) == 0,
+ "voe stopping rtp dump");
+ return;
+ }
+ String debugDirectory = getDebugDirectory();
+ check(voe.startRtpDump(audioChannel, debugDirectory +
+ String.format("/voe_%d.rtp", System.currentTimeMillis()),
+ VoiceEngine.RtpDirections.INCOMING) == 0,
+ "voe starting rtp dump");
+ }
+
+ private void updateAudioOutput() {
+ boolean useSpeaker = !headsetPluggedIn && speakerEnabled;
+ AudioManager audioManager =
+ ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE));
+ audioManager.setSpeakerphoneOn(useSpeaker);
+ }
+
+ private void UpdateSendDestination() {
+ if (remoteIp == null) {
+ return;
+ }
+ if (audioTxPort != 0) {
+ check(voe.setSendDestination(audioChannel, audioTxPort,
+ remoteIp) == 0, "VoE set send destination failed");
+ }
+ }
+
+ MediaEngineObserver observer;
+ public void setObserver(MediaEngineObserver observer) {
+ this.observer = observer;
+ }
+}
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngineObserver.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngineObserver.java
new file mode 100644
index 0000000000..3ea91b5e92
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngineObserver.java
@@ -0,0 +1,15 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+public interface MediaEngineObserver {
+ void newStats(String stats);
+}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MenuStateProvider.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MenuStateProvider.java
new file mode 100644
index 0000000000..08cb508667
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MenuStateProvider.java
@@ -0,0 +1,15 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+public interface MenuStateProvider {
+ public MediaEngine getEngine();
+}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/NativeWebRtcContextRegistry.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/NativeWebRtcContextRegistry.java
new file mode 100644
index 0000000000..3d4f00a4f6
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/NativeWebRtcContextRegistry.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+import android.content.Context;
+
+public class NativeWebRtcContextRegistry {
+ static {
+ System.loadLibrary("webrtcdemo-jni");
+ }
+
+ public native void register(Context context);
+ public native void unRegister();
+}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/RtcpStatistics.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/RtcpStatistics.java
new file mode 100644
index 0000000000..dbe817b1af
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/RtcpStatistics.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+public class RtcpStatistics {
+ // The definition of fraction lost can be found in RFC 3550. It is
+ // equivalent to taking the integer part after multiplying the loss
+ // fraction by 256.
+ public final int fractionLost;
+ public final int cumulativeLost;
+ public final int extendedMax;
+ public final int jitter;
+ public final int rttMs;
+
+ // Only allowed to be created by the native layer.
+ private RtcpStatistics(int fractionLost, int cumulativeLost, int extendedMax,
+ int jitter, int rttMs) {
+ this.fractionLost = fractionLost;
+ this.cumulativeLost = cumulativeLost;
+ this.extendedMax = extendedMax;
+ this.jitter = jitter;
+ this.rttMs = rttMs;
+ }
+}
\ No newline at end of file
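The RFC 3550 fraction-lost encoding noted in the comment above maps an 8-bit integer back to a loss percentage by dividing by 256. A minimal sketch of that conversion, using a hypothetical helper name that is not part of this patch:

// Hypothetical helper: fractionLost is floor(lossFraction * 256) per RFC 3550,
// so dividing by 256.0 recovers the fraction and multiplying by 100 yields a
// percentage suitable for a stats readout.
static double fractionLostToPercent(int fractionLost) {
  return (fractionLost / 256.0) * 100.0;
}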
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SettingsMenuFragment.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SettingsMenuFragment.java
new file mode 100644
index 0000000000..761f96ce29
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SettingsMenuFragment.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+import android.app.Activity;
+import android.app.Fragment;
+import android.os.Bundle;
+import android.util.Log;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.CheckBox;
+import android.widget.EditText;
+import android.widget.RadioGroup;
+import android.widget.TextView;
+import java.net.InetAddress;
+import java.net.NetworkInterface;
+import java.net.SocketException;
+import java.util.Enumeration;
+
+public class SettingsMenuFragment extends Fragment
+ implements RadioGroup.OnCheckedChangeListener {
+
+ private String TAG;
+ private MenuStateProvider stateProvider;
+
+ EditText etRemoteIp;
+
+ @Override
+ public View onCreateView(LayoutInflater inflater, ViewGroup container,
+ Bundle savedInstanceState) {
+ View v = inflater.inflate(R.layout.settingsmenu, container, false);
+
+ TAG = getResources().getString(R.string.tag);
+
+ CheckBox cbAudio = (CheckBox) v.findViewById(R.id.cbAudio);
+ cbAudio.setChecked(getEngine().audioEnabled());
+ cbAudio.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ CheckBox cbAudio = (CheckBox) checkBox;
+ getEngine().setAudio(cbAudio.isChecked());
+ cbAudio.setChecked(getEngine().audioEnabled());
+ }
+ });
+ boolean loopback =
+ getResources().getBoolean(R.bool.loopback_enabled_default);
+ CheckBox cbLoopback = (CheckBox) v.findViewById(R.id.cbLoopback);
+ cbLoopback.setChecked(loopback);
+ cbLoopback.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ loopbackChanged((CheckBox) checkBox);
+ }
+ });
+ etRemoteIp = (EditText) v.findViewById(R.id.etRemoteIp);
+ etRemoteIp.setOnFocusChangeListener(new View.OnFocusChangeListener() {
+ public void onFocusChange(View editText, boolean hasFocus) {
+ if (!hasFocus) {
+ getEngine().setRemoteIp(etRemoteIp.getText().toString());
+ }
+ }
+ });
+ // This must come after etRemoteIp is set up, since enabling loopback changes the remote IP.
+ loopbackChanged(cbLoopback);
+ return v;
+ }
+
+ @Override
+ public void onAttach(Activity activity) {
+ super.onAttach(activity);
+
+ // This makes sure that the container activity has implemented
+ // the callback interface. If not, it throws an exception.
+ try {
+ stateProvider = (MenuStateProvider) activity;
+ } catch (ClassCastException e) {
+ throw new ClassCastException(activity +
+ " must implement MenuStateProvider");
+ }
+ }
+
+ private void loopbackChanged(CheckBox cbLoopback) {
+ boolean loopback = cbLoopback.isChecked();
+ etRemoteIp.setText(loopback ? getLoopbackIPString() : getLocalIpAddress());
+ getEngine().setRemoteIp(etRemoteIp.getText().toString());
+ }
+
+ private String getLoopbackIPString() {
+ return getResources().getString(R.string.loopbackIp);
+ }
+
+ private String getLocalIpAddress() {
+ String localIp = "";
+ try {
+ for (Enumeration<NetworkInterface> en = NetworkInterface
+ .getNetworkInterfaces(); en.hasMoreElements();) {
+ NetworkInterface intf = en.nextElement();
+ for (Enumeration<InetAddress> enumIpAddr =
+ intf.getInetAddresses();
+ enumIpAddr.hasMoreElements(); ) {
+ InetAddress inetAddress = enumIpAddr.nextElement();
+ if (!inetAddress.isLoopbackAddress()) {
+ // Use the local IP address of the last network interface as the
+ // remote IP address.
+ localIp = inetAddress.getHostAddress().toString();
+ }
+ }
+ }
+ } catch (SocketException e) {
+ Log.e(TAG, "Unable to get local IP address. Not the end of the world", e);
+ }
+ return localIp;
+ }
+
+ private MediaEngine getEngine() {
+ return stateProvider.getEngine();
+ }
+
+ @Override
+ public void onCheckedChanged(RadioGroup group, int checkedId) {
+ }
+}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SpinnerAdapter.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SpinnerAdapter.java
new file mode 100644
index 0000000000..fb04a7aac3
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SpinnerAdapter.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+import android.widget.ArrayAdapter;
+import android.content.Context;
+import android.widget.TextView;
+import android.view.View;
+import android.view.ViewGroup;
+import android.view.LayoutInflater;
+
+public class SpinnerAdapter extends ArrayAdapter<String> {
+ private String[] menuItems;
+ LayoutInflater inflater;
+ int textViewResourceId;
+
+ public SpinnerAdapter(Context context, int textViewResourceId,
+ String[] objects, LayoutInflater inflater) {
+ super(context, textViewResourceId, objects);
+ menuItems = objects;
+ this.inflater = inflater;
+ this.textViewResourceId = textViewResourceId;
+ }
+
+ @Override public View getDropDownView(int position, View convertView,
+ ViewGroup parent) {
+ return getCustomView(position, convertView, parent);
+ }
+
+ @Override public View getView(int position, View convertView,
+ ViewGroup parent) {
+ return getCustomView(position, convertView, parent);
+ }
+
+ private View getCustomView(int position, View v, ViewGroup parent) {
+ View row = inflater.inflate(textViewResourceId, parent, false);
+ TextView label = (TextView) row.findViewById(R.id.spinner_row);
+ label.setText(menuItems[position]);
+ return row;
+ }
+}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VoiceEngine.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VoiceEngine.java
new file mode 100644
index 0000000000..900355ad8e
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VoiceEngine.java
@@ -0,0 +1,117 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+public class VoiceEngine {
+ private final long nativeVoiceEngine;
+
+ // Keep in sync (including this comment) with
+ // webrtc/common_types.h:NsModes
+ public enum NsModes {
+ UNCHANGED, DEFAULT, CONFERENCE, LOW_SUPPRESSION,
+ MODERATE_SUPPRESSION, HIGH_SUPPRESSION, VERY_HIGH_SUPPRESSION
+ }
+
+ // Keep in sync (including this comment) with
+ // webrtc/common_types.h:AgcModes
+ public enum AgcModes {
+ UNCHANGED, DEFAULT, ADAPTIVE_ANALOG, ADAPTIVE_DIGITAL,
+ FIXED_DIGITAL
+ }
+
+ // Keep in sync (including this comment) with
+ // webrtc/common_types.h:AecmModes
+ public enum AecmModes {
+ QUIET_EARPIECE_OR_HEADSET, EARPIECE, LOUD_EARPIECE,
+ SPEAKERPHONE, LOUD_SPEAKERPHONE
+ }
+
+ // Keep in sync (including this comment) with
+ // webrtc/common_types.h:EcModes
+ public enum EcModes { UNCHANGED, DEFAULT, CONFERENCE, AEC, AECM }
+
+ // Keep in sync (including this comment) with
+ // webrtc/common_types.h:RtpDirections
+ public enum RtpDirections { INCOMING, OUTGOING }
+
+ public static class AgcConfig {
+ AgcConfig(int targetLevelDbOv, int digitalCompressionGaindB,
+ boolean limiterEnable) {
+ this.targetLevelDbOv = targetLevelDbOv;
+ this.digitalCompressionGaindB = digitalCompressionGaindB;
+ this.limiterEnable = limiterEnable;
+ }
+ private final int targetLevelDbOv;
+ private final int digitalCompressionGaindB;
+ private final boolean limiterEnable;
+ }
+
+ public VoiceEngine() {
+ nativeVoiceEngine = create();
+ }
+ private static native long create();
+ public native int init();
+ public native void dispose();
+ public native int createChannel();
+ public native int deleteChannel(int channel);
+ public native int setLocalReceiver(int channel, int port);
+ public native int setSendDestination(int channel, int port, String ipaddr);
+ public native int startListen(int channel);
+ public native int startPlayout(int channel);
+ public native int startSend(int channel);
+ public native int stopListen(int channel);
+ public native int stopPlayout(int channel);
+ public native int stopSend(int channel);
+ public native int setSpeakerVolume(int volume);
+ public native int setLoudspeakerStatus(boolean enable);
+ public native int startPlayingFileLocally(
+ int channel,
+ String fileName,
+ boolean loop);
+ public native int stopPlayingFileLocally(int channel);
+ public native int startPlayingFileAsMicrophone(
+ int channel,
+ String fileName,
+ boolean loop);
+ public native int stopPlayingFileAsMicrophone(int channel);
+ public native int numOfCodecs();
+ public native CodecInst getCodec(int index);
+ public native int setSendCodec(int channel, CodecInst codec);
+ public int setEcStatus(boolean enable, EcModes mode) {
+ return setEcStatus(enable, mode.ordinal());
+ }
+ private native int setEcStatus(boolean enable, int ec_mode);
+ public int setAecmMode(AecmModes aecm_mode, boolean cng) {
+ return setAecmMode(aecm_mode.ordinal(), cng);
+ }
+ private native int setAecmMode(int aecm_mode, boolean cng);
+ public int setAgcStatus(boolean enable, AgcModes agc_mode) {
+ return setAgcStatus(enable, agc_mode.ordinal());
+ }
+ private native int setAgcStatus(boolean enable, int agc_mode);
+ public native int setAgcConfig(AgcConfig agc_config);
+ public int setNsStatus(boolean enable, NsModes ns_mode) {
+ return setNsStatus(enable, ns_mode.ordinal());
+ }
+ private native int setNsStatus(boolean enable, int ns_mode);
+ public native int startDebugRecording(String file);
+ public native int stopDebugRecording();
+ public int startRtpDump(int channel, String file,
+ RtpDirections direction) {
+ return startRtpDump(channel, file, direction.ordinal());
+ }
+ private native int startRtpDump(int channel, String file,
+ int direction);
+ public int stopRtpDump(int channel, RtpDirections direction) {
+ return stopRtpDump(channel, direction.ordinal());
+ }
+ private native int stopRtpDump(int channel, int direction);
+}
\ No newline at end of file
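The "keep in sync" comments in VoiceEngine.java matter because the wrapper forwards enum values to the native layer by ordinal, so the Java declaration order must match the corresponding enums in webrtc/common_types.h. A minimal sketch of that assumption:

// Sketch: EcModes is declared UNCHANGED, DEFAULT, CONFERENCE, AEC, AECM,
// so AECM.ordinal() evaluates to 4. The private native setEcStatus() receives
// this integer and is expected to interpret it using the same ordering as
// webrtc/common_types.h:EcModes; if the two declarations drift apart, the
// wrong echo-control mode would be selected.
int nativeEcMode = VoiceEngine.EcModes.AECM.ordinal();  // == 4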
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/WebRTCDemo.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/WebRTCDemo.java
new file mode 100644
index 0000000000..3b972cf126
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/WebRTCDemo.java
@@ -0,0 +1,210 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+import android.app.ActionBar.Tab;
+import android.app.ActionBar.TabListener;
+import android.app.ActionBar;
+import android.app.Activity;
+import android.app.Fragment;
+import android.app.FragmentTransaction;
+import android.content.pm.ActivityInfo;
+import android.media.AudioManager;
+import android.os.Bundle;
+import android.os.Handler;
+import android.view.KeyEvent;
+import android.view.Menu;
+import android.view.MenuInflater;
+import android.view.MenuItem;
+import android.view.WindowManager;
+
+public class WebRTCDemo extends Activity implements MenuStateProvider {
+
+ // From http://developer.android.com/guide/topics/ui/actionbar.html
+ public static class TabListener<T extends Fragment>
+ implements ActionBar.TabListener {
+ private Fragment fragment;
+ private final Activity activity;
+ private final String tag;
+ private final Class<T> instance;
+ private final Bundle args;
+
+ public TabListener(Activity activity, String tag, Class<T> clz) {
+ this(activity, tag, clz, null);
+ }
+
+ public TabListener(Activity activity, String tag, Class<T> clz,
+ Bundle args) {
+ this.activity = activity;
+ this.tag = tag;
+ this.instance = clz;
+ this.args = args;
+ }
+
+ public void onTabSelected(Tab tab, FragmentTransaction ft) {
+ // Check if the fragment is already initialized
+ if (fragment == null) {
+ // If not, instantiate and add it to the activity
+ fragment = Fragment.instantiate(activity, instance.getName(), args);
+ ft.add(android.R.id.content, fragment, tag);
+ } else {
+ // If it exists, simply attach it in order to show it
+ ft.attach(fragment);
+ }
+ }
+
+ public void onTabUnselected(Tab tab, FragmentTransaction ft) {
+ if (fragment != null) {
+ // Detach the fragment, because another one is being attached
+ ft.detach(fragment);
+ }
+ }
+
+ public void onTabReselected(Tab tab, FragmentTransaction ft) {
+ // User selected the already selected tab. Do nothing.
+ }
+ }
+
+ private NativeWebRtcContextRegistry contextRegistry = null;
+ private MediaEngine mediaEngine = null;
+ private Handler handler;
+ public MediaEngine getEngine() { return mediaEngine; }
+
+ @Override
+ public void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+
+ // Global settings.
+ getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
+ getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
+
+ // State.
+ // Must be instantiated before MediaEngine.
+ contextRegistry = new NativeWebRtcContextRegistry();
+ contextRegistry.register(this);
+
+ // Load all default settings defined in the XML resources.
+ mediaEngine = new MediaEngine(this);
+ mediaEngine.setRemoteIp(getResources().getString(R.string.loopbackIp));
+
+ mediaEngine.setAudio(getResources().getBoolean(
+ R.bool.audio_enabled_default));
+ mediaEngine.setAudioCodec(mediaEngine.getIsacIndex());
+ mediaEngine.setAudioRxPort(getResources().getInteger(
+ R.integer.aRxPortDefault));
+ mediaEngine.setAudioTxPort(getResources().getInteger(
+ R.integer.aTxPortDefault));
+ mediaEngine.setSpeaker(getResources().getBoolean(
+ R.bool.speaker_enabled_default));
+ mediaEngine.setDebuging(getResources().getBoolean(
+ R.bool.apm_debug_enabled_default));
+
+ // Create action bar with all tabs.
+ ActionBar actionBar = getActionBar();
+ actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_TABS);
+ actionBar.setDisplayShowTitleEnabled(false);
+
+ Tab tab = actionBar.newTab()
+ .setText("Main")
+ .setTabListener(new TabListener<MainMenuFragment>(
+ this, "main", MainMenuFragment.class));
+ actionBar.addTab(tab);
+
+ tab = actionBar.newTab()
+ .setText("Settings")
+ .setTabListener(new TabListener<SettingsMenuFragment>(
+ this, "Settings", SettingsMenuFragment.class));
+ actionBar.addTab(tab);
+
+ tab = actionBar.newTab()
+ .setText("Audio")
+ .setTabListener(new TabListener<AudioMenuFragment>(
+ this, "Audio", AudioMenuFragment.class));
+ actionBar.addTab(tab);
+
+ enableTimedStartStop();
+
+ // Hint that voice call audio stream should be used for hardware volume
+ // controls.
+ setVolumeControlStream(AudioManager.STREAM_VOICE_CALL);
+ }
+
+ @Override
+ public boolean onCreateOptionsMenu(Menu menu) {
+ MenuInflater inflater = getMenuInflater();
+ inflater.inflate(R.menu.main_activity_actions, menu);
+ return super.onCreateOptionsMenu(menu);
+ }
+
+ @Override
+ public boolean onOptionsItemSelected(MenuItem item) {
+ // Handle presses on the action bar items
+ switch (item.getItemId()) {
+ case R.id.action_exit:
+ MainMenuFragment main = (MainMenuFragment)getFragmentManager()
+ .findFragmentByTag("main");
+ main.stopAll();
+ finish();
+ return true;
+ default:
+ return super.onOptionsItemSelected(item);
+ }
+ }
+
+ @Override
+ public void onDestroy() {
+ disableTimedStartStop();
+ mediaEngine.dispose();
+ contextRegistry.unRegister();
+ super.onDestroy();
+ }
+
+ @Override
+ public boolean onKeyDown(int keyCode, KeyEvent event) {
+ if (keyCode == KeyEvent.KEYCODE_BACK) {
+ // Prevent app from running in the background.
+ MainMenuFragment main = (MainMenuFragment)getFragmentManager()
+ .findFragmentByTag("main");
+ main.stopAll();
+ finish();
+ return true;
+ }
+ return super.onKeyDown(keyCode, event);
+ }
+
+ private int getCallRestartPeriodicity() {
+ return getResources().getInteger(R.integer.call_restart_periodicity_ms);
+ }
+
+ // Periodically toggles the call between started and stopped via a Handler.
+ void enableTimedStartStop() {
+ if (getCallRestartPeriodicity() > 0) {
+ // Periodicity == 0 <-> Disabled.
+ handler = new Handler();
+ handler.postDelayed(startOrStopCallback, getCallRestartPeriodicity());
+ }
+ }
+
+ void disableTimedStartStop() {
+ if (handler != null) {
+ handler.removeCallbacks(startOrStopCallback);
+ }
+ }
+
+ private Runnable startOrStopCallback = new Runnable() {
+ public void run() {
+ MainMenuFragment main = (MainMenuFragment)getFragmentManager()
+ .findFragmentByTag("main");
+ main.toggleStart();
+ handler.postDelayed(startOrStopCallback, getCallRestartPeriodicity());
+ }
+ };
+}