summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGlenn Kasten <gkasten@google.com>2015-09-08 09:19:29 -0700
committerGlenn Kasten <gkasten@google.com>2015-09-08 09:39:30 -0700
commit09b4fc56046ea864f1c0d4620c0d3fe356a5f4b2 (patch)
tree22b4a10f72ca8be01968192ae63de44cb8b78de2
parent1a59efad94df3d553c2ff6f6060fc30ba088a0cb (diff)
downloaddrrickorang-09b4fc56046ea864f1c0d4620c0d3fe356a5f4b2.tar.gz
Initialize from commit 4553e518f23d03eac2277da955c8709c05050281
-rw-r--r--LoopbackApp/.gitignore6
-rw-r--r--LoopbackApp/.idea/compiler.xml2
-rw-r--r--LoopbackApp/.idea/dictionaries/rago.xml3
-rw-r--r--LoopbackApp/.idea/misc.xml61
-rw-r--r--LoopbackApp/.idea/runConfigurations.xml12
-rw-r--r--LoopbackApp/LoopbackApp.iml2
-rw-r--r--LoopbackApp/app/.gitignore1
-rw-r--r--LoopbackApp/app/app.iml8
-rw-r--r--LoopbackApp/app/build.gradle2
-rw-r--r--LoopbackApp/app/src/main/AndroidManifest.xml39
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/.DS_Storebin6148 -> 0 bytes
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/AboutActivity.java51
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/AudioFileOutput.java114
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/AudioTestService.java103
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/BufferPeriod.java127
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/Constant.java55
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/Correlation.java78
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/FFT.java112
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchDetectionThread.java285
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchesActivity.java89
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/HistogramView.java356
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoadThread.java (renamed from LoopbackApp/app/src/main/java/org/drrickorang/loopback/BufferPeriodActivity.java)37
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackActivity.java1587
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackApplication.java222
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackAudioThread.java653
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/NativeAudioThread.java432
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/PerformanceMeasurement.java277
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/Pipe.java137
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/PipeByteBuffer.java174
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/PipeShort.java146
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/PlayerBufferPeriodActivity.java64
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/RecorderBufferPeriodActivity.java70
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/RecorderRunnable.java544
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/SettingsActivity.java295
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/SineWaveTone.java88
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/ToneGeneration.java58
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/TwoSineWavesTone.java110
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/Utilities.java59
-rw-r--r--LoopbackApp/app/src/main/java/org/drrickorang/loopback/WavePlotView.java605
-rw-r--r--LoopbackApp/app/src/main/jni/audio_utils/atomic.c10
-rw-r--r--LoopbackApp/app/src/main/jni/audio_utils/fifo.c38
-rw-r--r--LoopbackApp/app/src/main/jni/audio_utils/roundup.c4
-rw-r--r--LoopbackApp/app/src/main/jni/jni_sles.c73
-rw-r--r--LoopbackApp/app/src/main/jni/jni_sles.h17
-rw-r--r--LoopbackApp/app/src/main/jni/sles.cpp389
-rw-r--r--LoopbackApp/app/src/main/jni/sles.h49
-rw-r--r--LoopbackApp/app/src/main/res/layout/about_activity.xml31
-rw-r--r--LoopbackApp/app/src/main/res/layout/glitches_activity.xml36
-rw-r--r--LoopbackApp/app/src/main/res/layout/main_activity.xml246
-rw-r--r--LoopbackApp/app/src/main/res/layout/player_buffer_period_activity.xml46
-rw-r--r--LoopbackApp/app/src/main/res/layout/recorder_buffer_period_activity.xml (renamed from LoopbackApp/app/src/main/res/layout/buffer_period_activity.xml)24
-rw-r--r--LoopbackApp/app/src/main/res/layout/settings_activity.xml56
-rw-r--r--LoopbackApp/app/src/main/res/values/strings.xml57
-rw-r--r--LoopbackApp/build/intermediates/gradle_project_sync_data.binbin0 -> 602 bytes
-rw-r--r--LoopbackApp/local.properties12
55 files changed, 6042 insertions, 2110 deletions
diff --git a/LoopbackApp/.gitignore b/LoopbackApp/.gitignore
new file mode 100644
index 0000000..afbdab3
--- /dev/null
+++ b/LoopbackApp/.gitignore
@@ -0,0 +1,6 @@
+.gradle
+/local.properties
+/.idea/workspace.xml
+/.idea/libraries
+.DS_Store
+/build
diff --git a/LoopbackApp/.idea/compiler.xml b/LoopbackApp/.idea/compiler.xml
index 96cc43e..9a8b7e5 100644
--- a/LoopbackApp/.idea/compiler.xml
+++ b/LoopbackApp/.idea/compiler.xml
@@ -1,6 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="CompilerConfiguration">
+ <option name="DEFAULT_COMPILER" value="Javac" />
<resourceExtensions />
<wildcardResourcePatterns>
<entry name="!?*.java" />
@@ -11,7 +12,6 @@
<entry name="!?*.flex" />
<entry name="!?*.kt" />
<entry name="!?*.clj" />
- <entry name="!?*.aj" />
</wildcardResourcePatterns>
<annotationProcessing>
<profile default="true" name="Default" enabled="false">
diff --git a/LoopbackApp/.idea/dictionaries/rago.xml b/LoopbackApp/.idea/dictionaries/rago.xml
new file mode 100644
index 0000000..ddf5134
--- /dev/null
+++ b/LoopbackApp/.idea/dictionaries/rago.xml
@@ -0,0 +1,3 @@
+<component name="ProjectDictionaryState">
+ <dictionary name="rago" />
+</component> \ No newline at end of file
diff --git a/LoopbackApp/.idea/misc.xml b/LoopbackApp/.idea/misc.xml
index e45faed..6dcbe11 100644
--- a/LoopbackApp/.idea/misc.xml
+++ b/LoopbackApp/.idea/misc.xml
@@ -1,5 +1,48 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
+ <component name="EntryPointsManager">
+ <entry_points version="2.0" />
+ </component>
+ <component name="ProjectInspectionProfilesVisibleTreeState">
+ <entry key="Project Default">
+ <profile-state>
+ <expanded-state>
+ <State>
+ <id />
+ </State>
+ <State>
+ <id>Android Lint</id>
+ </State>
+ <State>
+ <id>Finalization issuesJava</id>
+ </State>
+ <State>
+ <id>Groovy</id>
+ </State>
+ <State>
+ <id>ImportsJava</id>
+ </State>
+ <State>
+ <id>JUnit issuesJava</id>
+ </State>
+ <State>
+ <id>Java</id>
+ </State>
+ <State>
+ <id>Memory issuesJava</id>
+ </State>
+ <State>
+ <id>Probable bugsGroovy</id>
+ </State>
+ </expanded-state>
+ <selected-state>
+ <State>
+ <id>Android</id>
+ </State>
+ </selected-state>
+ </profile-state>
+ </entry>
+ </component>
<component name="ProjectLevelVcsManager" settingsEditedManually="false">
<OptionsSetting value="true" id="Add" />
<OptionsSetting value="true" id="Remove" />
@@ -10,10 +53,26 @@
<ConfirmationsSetting value="0" id="Add" />
<ConfirmationsSetting value="0" id="Remove" />
</component>
- <component name="ProjectRootManager" version="2" languageLevel="JDK_1_7" default="true" assert-keyword="true" jdk-15="true" project-jdk-name="1.7" project-jdk-type="JavaSDK">
+ <component name="ProjectRootManager" version="2" languageLevel="JDK_1_7" assert-keyword="true" jdk-15="true" project-jdk-name="1.7" project-jdk-type="JavaSDK">
<output url="file://$PROJECT_DIR$/build/classes" />
</component>
<component name="ProjectType">
<option name="id" value="Android" />
</component>
+ <component name="masterDetails">
+ <states>
+ <state key="ProjectJDKs.UI">
+ <settings>
+ <last-edited>1.7</last-edited>
+ <splitter-proportions>
+ <option name="proportions">
+ <list>
+ <option value="0.2" />
+ </list>
+ </option>
+ </splitter-proportions>
+ </settings>
+ </state>
+ </states>
+ </component>
</project> \ No newline at end of file
diff --git a/LoopbackApp/.idea/runConfigurations.xml b/LoopbackApp/.idea/runConfigurations.xml
new file mode 100644
index 0000000..7f68460
--- /dev/null
+++ b/LoopbackApp/.idea/runConfigurations.xml
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+ <component name="RunConfigurationProducerService">
+ <option name="ignoredProducers">
+ <set>
+ <option value="org.jetbrains.plugins.gradle.execution.test.runner.AllInPackageGradleConfigurationProducer" />
+ <option value="org.jetbrains.plugins.gradle.execution.test.runner.TestClassGradleConfigurationProducer" />
+ <option value="org.jetbrains.plugins.gradle.execution.test.runner.TestMethodGradleConfigurationProducer" />
+ </set>
+ </option>
+ </component>
+</project> \ No newline at end of file
diff --git a/LoopbackApp/LoopbackApp.iml b/LoopbackApp/LoopbackApp.iml
index e257fad..28c62be 100644
--- a/LoopbackApp/LoopbackApp.iml
+++ b/LoopbackApp/LoopbackApp.iml
@@ -8,7 +8,7 @@
</configuration>
</facet>
</component>
- <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <component name="NewModuleRootManager" LANGUAGE_LEVEL="JDK_1_7" inherit-compiler-output="true">
<exclude-output />
<content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/.gradle" />
diff --git a/LoopbackApp/app/.gitignore b/LoopbackApp/app/.gitignore
new file mode 100644
index 0000000..796b96d
--- /dev/null
+++ b/LoopbackApp/app/.gitignore
@@ -0,0 +1 @@
+/build
diff --git a/LoopbackApp/app/app.iml b/LoopbackApp/app/app.iml
index 76a0905..bf41d69 100644
--- a/LoopbackApp/app/app.iml
+++ b/LoopbackApp/app/app.iml
@@ -12,10 +12,12 @@
<option name="SELECTED_TEST_ARTIFACT" value="_android_test_" />
<option name="ASSEMBLE_TASK_NAME" value="assembleDebug" />
<option name="COMPILE_JAVA_TASK_NAME" value="compileDebugSources" />
- <option name="SOURCE_GEN_TASK_NAME" value="generateDebugSources" />
<option name="ASSEMBLE_TEST_TASK_NAME" value="assembleDebugAndroidTest" />
<option name="COMPILE_JAVA_TEST_TASK_NAME" value="compileDebugAndroidTestSources" />
- <option name="TEST_SOURCE_GEN_TASK_NAME" value="generateDebugAndroidTestSources" />
+ <afterSyncTasks>
+ <task>generateDebugAndroidTestSources</task>
+ <task>generateDebugSources</task>
+ </afterSyncTasks>
<option name="ALLOW_USER_CONFIGURATION" value="false" />
<option name="MANIFEST_FILE_RELATIVE_PATH" value="/src/main/AndroidManifest.xml" />
<option name="RES_FOLDER_RELATIVE_PATH" value="/src/main/res" />
@@ -24,7 +26,7 @@
</configuration>
</facet>
</component>
- <component name="NewModuleRootManager" inherit-compiler-output="false">
+ <component name="NewModuleRootManager" LANGUAGE_LEVEL="JDK_1_7" inherit-compiler-output="false">
<output url="file://$MODULE_DIR$/build/intermediates/classes/debug" />
<output-test url="file://$MODULE_DIR$/build/intermediates/classes/androidTest/debug" />
<exclude-output />
diff --git a/LoopbackApp/app/build.gradle b/LoopbackApp/app/build.gradle
index a67c80d..b1172f4 100644
--- a/LoopbackApp/app/build.gradle
+++ b/LoopbackApp/app/build.gradle
@@ -21,4 +21,6 @@ android {
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.txt'
}
}
+
+
}
diff --git a/LoopbackApp/app/src/main/AndroidManifest.xml b/LoopbackApp/app/src/main/AndroidManifest.xml
index 8634d03..4dae4a7 100644
--- a/LoopbackApp/app/src/main/AndroidManifest.xml
+++ b/LoopbackApp/app/src/main/AndroidManifest.xml
@@ -23,34 +23,37 @@
xmlns:android="http://schemas.android.com/apk/res/android"
package="org.drrickorang.loopback"
- android:versionCode="6"
- android:versionName="0.4">
+ android:versionCode="7"
+ android:versionName="0.5">
<uses-sdk
- android:minSdkVersion="11"
+ android:minSdkVersion="18"
android:targetSdkVersion="21"/>
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.STORAGE" />
+ <uses-permission android:name="android.permission.CAPTURE_AUDIO_OUTPUT" />
<application
android:label="@string/app_name"
android:icon="@drawable/ic_launcher"
- android:name="LoopbackApplication"
- >
+ android:name="LoopbackApplication">
<activity
android:name="org.drrickorang.loopback.LoopbackActivity"
android:screenOrientation="sensorPortrait"
android:theme="@android:style/Theme.Holo.Light"
- android:configChanges="orientation|keyboardHidden|screenLayout">
+ android:configChanges="orientation|keyboardHidden|screenLayout"
+ android:launchMode="singleTop">
<intent-filter>
<action android:name="android.intent.action.MAIN"/>
<category android:name="android.intent.category.LAUNCHER"/>
</intent-filter>
</activity>
+ <service android:name=".AudioTestService" />
+
<activity
android:name="org.drrickorang.loopback.SettingsActivity"
android:parentActivityName="org.drrickorang.loopback.LoopbackActivity"
@@ -73,8 +76,28 @@
</activity>
<activity
- android:name="org.drrickorang.loopback.BufferPeriodActivity"
- android:label="Buffer Period Histogram"
+ android:name=".RecorderBufferPeriodActivity"
+ android:label="Recorder Buffer Period Histogram"
+ android:parentActivityName="org.drrickorang.loopback.LoopbackActivity"
+ android:theme="@android:style/Theme.Holo.Light">
+ <meta-data
+ android:name="android.support.PARENT_ACTIVITY"
+ android:value="org.drrickorang.loopback.LoopbackActivity" />
+ </activity>
+
+ <activity
+ android:name=".PlayerBufferPeriodActivity"
+ android:label="Player Buffer Period Histogram"
+ android:parentActivityName="org.drrickorang.loopback.LoopbackActivity"
+ android:theme="@android:style/Theme.Holo.Light">
+ <meta-data
+ android:name="android.support.PARENT_ACTIVITY"
+ android:value="org.drrickorang.loopback.LoopbackActivity" />
+ </activity>
+
+ <activity
+ android:name="org.drrickorang.loopback.GlitchesActivity"
+ android:label="List of Glitches"
android:parentActivityName="org.drrickorang.loopback.LoopbackActivity"
android:theme="@android:style/Theme.Holo.Light">
<meta-data
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/.DS_Store b/LoopbackApp/app/src/main/java/org/drrickorang/.DS_Store
deleted file mode 100644
index 4a310a6..0000000
--- a/LoopbackApp/app/src/main/java/org/drrickorang/.DS_Store
+++ /dev/null
Binary files differ
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AboutActivity.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AboutActivity.java
index a1ca297..83dfdfb 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AboutActivity.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AboutActivity.java
@@ -17,55 +17,24 @@
package org.drrickorang.loopback;
import android.app.Activity;
-import android.content.Intent;
import android.os.Bundle;
-import android.text.Html;
-import android.text.method.LinkMovementMethod;
+import android.view.View;
import android.widget.TextView;
+
/**
- * Created by ninatai on 5/11/15.
+ * This activity shows information related to this application.
*/
-public class AboutActivity extends Activity {
- public void onCreate(Bundle savedInstanceState) {
-
- super.onCreate(savedInstanceState);
-
- Intent intent = getIntent();
- String message1 = "Audio latency testing app using the Dr. Rick O'Rang audio loopback dongle.\n\n" +
- "Author: Ricardo Garcia and Tzu-Yin Tai\n\n" +
- "Open source project on:\n";
- String message2 = "https://github.com/gkasten/drrickorang\n\n";
- //"References:\n" +
- //"https://source.android.com/devices/audio/loopback.html\n" +
- //"https://source.android.com/devices/audio/latency_measure.html#loopback";
-
- // Create the text view
- //TextView textView = new TextView(this);
- //TextView t2 = (TextView) findViewById(R.id.text2);
- //t2.setTextSize(15);
- //t2.setMovementMethod(LinkMovementMethod.getInstance());
+public class AboutActivity extends Activity {
- //textView.setText(message1 + message2);
- TextView t3 = new TextView(this);
- t3.setTextSize(17);
- t3.setText(Html.fromHtml("Round-trip audio latency testing app" + "<br />" +
- "using the Dr. Rick O'Rang" + "<br />" +
- "audio loopback dongle." + "<br />" + "<br />" +
- "Authors: Ricardo Garcia and Tzu-Yin Tai" + "<br />" + "<br />" +
-/*
- "Open source project on:" + "<br />" +
- "<a href=\"https://github.com/gkasten/drrickorang\">https://github.com/gkasten/drrickorang</a>" + "<br />" + "<br />" +
-*/
- "References:" + "<br />" +
- "<a href=\"https://source.android.com/devices/audio/latency.html\">https://source.android.com/devices/audio/latency.html</a>" + "<br />" +
- "<a href=\"https://goo.gl/dxcw0d\">https://goog.gl/dxcw0d</a>"+
- "<br />" + "<br />"));
- t3.setMovementMethod(LinkMovementMethod.getInstance());
+ public void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
- // Set the text view as the activity layout
- setContentView(t3);
+ // FIXME spaces in xml not showing up as expected, so the displayed text may look unaligned
+ View view = getLayoutInflater().inflate(R.layout.about_activity, null);
+ setContentView(view);
}
+
}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AudioFileOutput.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AudioFileOutput.java
index bc8343e..b78f04a 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AudioFileOutput.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AudioFileOutput.java
@@ -16,54 +16,55 @@
package org.drrickorang.loopback;
-import android.os.Environment;
-import android.os.ParcelFileDescriptor;
-import android.util.Log;
import java.io.FileDescriptor;
import java.io.FileOutputStream;
-import java.io.FileInputStream;
import java.io.IOException;
-import java.io.OutputStream;
-import java.io.File;
-import android.net.Uri;
-import android.content.Context;
import java.util.Arrays;
+import android.content.Context;
+import android.net.Uri;
+import android.os.ParcelFileDescriptor;
+import android.util.Log;
+
+
/**
- *
+ * This class is used to save the results to a .wav file.
*/
+
public class AudioFileOutput {
+ private static final String TAG = "AudioFileOutput";
- private static final String LOGTAG = "LoopbackWrite";
- private Uri mUri;
- private Context mContext;
+ private Uri mUri;
+ private Context mContext;
private FileOutputStream mOutputStream;
- private FileDescriptor mFileDescriptor;
- private int mSamplingRate = 48000;
+ private final int mSamplingRate;
+
+
public AudioFileOutput(Context context, Uri uri, int samplingRate) {
mContext = context;
mUri = uri;
mSamplingRate = samplingRate;
}
- public boolean writeData(double [] data) {
+
+ public boolean writeData(double[] data) {
boolean status = false;
ParcelFileDescriptor parcelFileDescriptor = null;
try {
parcelFileDescriptor =
mContext.getContentResolver().openFileDescriptor(mUri, "w");
- mFileDescriptor = parcelFileDescriptor.getFileDescriptor();
- mOutputStream = new FileOutputStream(mFileDescriptor);
+ FileDescriptor fileDescriptor = parcelFileDescriptor.getFileDescriptor();
+ mOutputStream = new FileOutputStream(fileDescriptor);
log("Done creating output stream");
int sampleCount = data.length;
writeHeader(sampleCount);
- writeDataBufer(data);
+ writeDataBuffer(data);
mOutputStream.close();
status = true;
parcelFileDescriptor.close();
} catch (Exception e) {
mOutputStream = null;
- log("Failed to open wavefile" +e);
+ log("Failed to open wavefile" + e);
} finally {
try {
if (parcelFileDescriptor != null) {
@@ -77,6 +78,7 @@ public class AudioFileOutput {
return status;
}
+
private void writeHeader(int samples) {
if (mOutputStream != null) {
try {
@@ -85,75 +87,77 @@ public class AudioFileOutput {
int bitsPerSample = 16;
byte[] chunkSize = new byte[4];
byte[] dataSize = new byte[4];
- int tempChunkSize = samples*2 + 36;
+ int tempChunkSize = (samples * 2) + 36;
chunkSize[3] = (byte) (tempChunkSize >> 24);
chunkSize[2] = (byte) (tempChunkSize >> 16);
chunkSize[1] = (byte) (tempChunkSize >> 8);
chunkSize[0] = (byte) tempChunkSize;
- int tempDataSize = samples*2;
+ int tempDataSize = samples * 2;
dataSize[3] = (byte) (tempDataSize >> 24);
dataSize[2] = (byte) (tempDataSize >> 16);
dataSize[1] = (byte) (tempDataSize >> 8);
dataSize[0] = (byte) tempDataSize;
byte[] header = new byte[] {
- 'R', 'I', 'F', 'F',
- chunkSize[0], chunkSize[1], chunkSize[2], chunkSize[3],
- 'W', 'A', 'V', 'E',
- 'f', 'm', 't', ' ',
- 16, 0, 0, 0,
- 1, 0, // PCM
- (byte) channels, 0, // number of channels
- (byte) mSamplingRate, (byte) (mSamplingRate >> 8), 0, 0, // sample rate
- 0, 0, 0, 0, // byte rate
- (byte) (channels * blockAlignment),
- 0, // block alignment
- (byte) bitsPerSample,
- 0, // bits per sample
- 'd', 'a', 't', 'a',
- dataSize[0], dataSize[1], dataSize[2], dataSize[3],
+ 'R', 'I', 'F', 'F',
+ chunkSize[0], chunkSize[1], chunkSize[2], chunkSize[3],
+ 'W', 'A', 'V', 'E',
+ 'f', 'm', 't', ' ',
+ 16, 0, 0, 0,
+ 1, 0, // PCM
+ (byte) channels, 0, // number of channels
+ (byte) mSamplingRate, (byte) (mSamplingRate >> 8), 0, 0, // sample rate
+ 0, 0, 0, 0, // byte rate
+ (byte) (channels * blockAlignment),
+ 0, // block alignment
+ (byte) bitsPerSample,
+ 0, // bits per sample
+ 'd', 'a', 't', 'a',
+ dataSize[0], dataSize[1], dataSize[2], dataSize[3],
};
mOutputStream.write(header);
log("Done writing header");
} catch (IOException e) {
- Log.e(LOGTAG, "Error writing header "+e);
+ Log.e(TAG, "Error writing header " + e);
}
}
}
- private void writeDataBufer(double [] data) {
+
+ private void writeDataBuffer(double [] data) {
if (mOutputStream != null) {
try {
int sampleCount = data.length;
int bufferSize = 1024; //blocks of 1024 samples
- byte [] buffer = new byte[bufferSize*2];
- double maxval = Math.pow(2, 15);
- for (int ii=0; ii<sampleCount; ii +=bufferSize) {
+ byte [] buffer = new byte[bufferSize * 2];
+
+ for (int ii = 0; ii < sampleCount; ii += bufferSize) {
//clear buffer
- Arrays.fill( buffer, (byte)0);
- int bytesUsed =0;
- for (int jj=0; jj<bufferSize; jj++) {
- int index = ii+jj;
- if (index>=sampleCount)
+ Arrays.fill(buffer, (byte) 0);
+ int bytesUsed = 0;
+ for (int jj = 0; jj < bufferSize; jj++) {
+ int index = ii + jj;
+ if (index >= sampleCount)
break;
- int value = (int) Math.round( data[index]*maxval );
- byte ba =(byte)( 0xFF &(value >>8));
- byte bb = (byte) ( 0xFF &(value));
- buffer[jj*2+1] = ba;
- buffer[jj*2] = bb;
- bytesUsed +=2;
+ int value = (int) Math.round(data[index] * Short.MAX_VALUE);
+ byte ba = (byte) (0xFF & (value >> 8)); //little-endian
+ byte bb = (byte) (0xFF & (value));
+ buffer[(jj * 2) + 1] = ba;
+ buffer[jj * 2] = bb;
+ bytesUsed += 2;
}
mOutputStream.write(buffer, 0, bytesUsed);
-// log("writing samples:"+ii+"/"+ sampleCount);
}
log("Done writing data");
} catch (IOException e) {
- Log.e(LOGTAG, "Error writing data "+e);
+ Log.e(TAG, "Error writing data " + e);
}
}
}
+
private static void log(String msg) {
- Log.v(LOGTAG, msg);
+ Log.v(TAG, msg);
}
+
}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AudioTestService.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AudioTestService.java
new file mode 100644
index 0000000..329d62b
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AudioTestService.java
@@ -0,0 +1,103 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.app.Notification;
+import android.app.Service;
+import android.content.Intent;
+import android.os.Build;
+import android.os.IBinder;
+import android.os.Binder;
+import android.util.Log;
+
+
+/**
+ * This is the Service being created during the first onStart() in the activity.
+ * Threads that are needed for the test will be created under this Service.
+ * At the end of the test, this Service will pass the test results back to LoopbackActivity.
+ */
+
+public class AudioTestService extends Service {
+ private static final String TAG = "AudioTestService";
+
+ private final IBinder mBinder = new AudioTestBinder();
+
+
+ @Override
+ public void onCreate() {
+ runAsForegroundService();
+ log("Audio Test Service created!");
+ }
+
+
+ @Override
+ public int onStartCommand(Intent intent, int flags, int startId) {
+ log("Service onStartCommand: " + startId);
+ //runAsForegroundService();
+ return Service.START_NOT_STICKY;
+ }
+
+
+ /**
+ * This method will run the Service as Foreground Service, so the Service won't be killed
+ * and restarted after a while.
+ */
+ private void runAsForegroundService() {
+ int notificationId = 1400;
+ Notification.Builder builder = new Notification.Builder(this)
+ .setSmallIcon(R.drawable.ic_launcher).setContentTitle("Loopback App")
+ .setContentText("Please disregard me.");
+ Notification notification;
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) {
+ notification = builder.getNotification();
+ } else {
+ notification = builder.build();
+ }
+
+ startForeground(notificationId, notification);
+ }
+
+
+ @Override
+ public IBinder onBind(Intent intent) {
+ log("Service onBind");
+ return mBinder;
+ }
+
+
+ @Override
+ public void onDestroy() {
+ log("Service onDestroy");
+ }
+
+
+ private static void log(String msg) {
+ Log.v(TAG, msg);
+ }
+
+
+ /**
+ * This class is only used by AudioTestService to create a binder that passes the
+ * AudioTestService back to LoopbackActivity.
+ */
+ public class AudioTestBinder extends Binder {
+ AudioTestService getService() {
+ return AudioTestService.this;
+ }
+ }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/BufferPeriod.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/BufferPeriod.java
index ee9e01d..ccfab52 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/BufferPeriod.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/BufferPeriod.java
@@ -16,104 +16,105 @@
package org.drrickorang.loopback;
-/**
- * Created by ninatai on 5/12/15.
- */
+import java.util.Arrays;
import android.util.Log;
-import org.drrickorang.loopback.LoopbackAudioThread.RecorderRunnable;
-import java.util.Arrays;
-import java.util.HashMap;
+/**
+ * This class records the buffer period of the audio player or recorder when in Java mode.
+ * Currently the accuracy is in 1ms.
+ */
-// TODO add record for native audio thread
+//TODO for native mode, should use a scale more accurate than the current 1ms
public class BufferPeriod {
- private static long mPreviousTime = 0;
- private static long mCurrentTime = 0;
- private static final int range = 102; //TODO adjust this value
- private static int mMaxBufferPeriod = 0;
- private static boolean exceedRange = false;
- private static int mCount = 0;
- private static int mDiscard = 5; // discard the first few buffer period values
+ private static final String TAG = "BufferPeriod";
+
+ private long mStartTimeNs = 0; // first time collectBufferPeriod() is called
+ private long mPreviousTimeNs = 0;
+ private long mCurrentTimeNs = 0;
- private static int[] mJavaBufferPeriod = new int[range];
+ private int mMaxBufferPeriod = 0;
+ private int mCount = 0;
+ private final int range = 1002; // store counts for 0ms to 1000ms, and for > 1000ms
+ private int[] mBufferPeriod = new int[range];
+ private int[] mBufferPeriodTimeStamp = new int[range];
- public static void collectBufferPeriod() {
- mCurrentTime = System.nanoTime();
- mCount += 1;
- // if = 0 it's the first time the thread runs, so don't record the interval
- if (mPreviousTime != 0 && mCurrentTime != 0 && mCount > mDiscard) {
- long diffInNano = mCurrentTime - mPreviousTime;
- int diffInMilli = (int) Math.ceil(( ((double)diffInNano / 1000000))); // round up
+ /**
+ * For player, this function is called before every AudioTrack.write().
+ * For recorder, this function is called after every AudioRecord.read() with read > 0.
+ */
+ public void collectBufferPeriod() {
+ mCurrentTimeNs = System.nanoTime();
+ mCount++;
+
+ // if mPreviousTimeNs = 0, it's the first time this function is called
+ if (mPreviousTimeNs == 0) {
+ mStartTimeNs = mCurrentTimeNs;
+ }
+
+ final int discard = 10; // discard the first few buffer period values
+ if (mPreviousTimeNs != 0 && mCount > discard) {
+ long diffInNano = mCurrentTimeNs - mPreviousTimeNs;
+ // diffInMilli is rounded up
+ int diffInMilli = (int) ((diffInNano + Constant.NANOS_PER_MILLI - 1) /
+ Constant.NANOS_PER_MILLI);
+
+ long timeStampInNano = mCurrentTimeNs - mStartTimeNs;
+ int timeStampInMilli = (int) ((timeStampInNano + Constant.NANOS_PER_MILLI - 1) /
+ Constant.NANOS_PER_MILLI);
if (diffInMilli > mMaxBufferPeriod) {
mMaxBufferPeriod = diffInMilli;
}
- // from 0 ms to 1000 ms, plus a sum of all instances > 1000ms
- if (diffInMilli >= 0 && diffInMilli < (range - 1)) {
- mJavaBufferPeriod[diffInMilli] += 1;
- } else if (diffInMilli >= (range - 1)) {
- mJavaBufferPeriod[range-1] += 1;
- } else if (diffInMilli < 0) {
- // throw new IllegalBufferPeriodException("BufferPeriod must be >= 0");
- errorLog("Having negative BufferPeriod.");
+ // from 0 ms to 1000 ms, plus a sum of all occurrences > 1000ms
+ if (diffInMilli >= (range - 1)) {
+ mBufferPeriod[range - 1]++;
+ mBufferPeriodTimeStamp[range - 1] = timeStampInMilli;
+ } else if (diffInMilli >= 0) {
+ mBufferPeriod[diffInMilli]++;
+ mBufferPeriodTimeStamp[diffInMilli] = timeStampInMilli;
+ } else { // for diffInMilli < 0
+ log("Having negative BufferPeriod.");
}
}
- mPreviousTime = mCurrentTime;
+ mPreviousTimeNs = mCurrentTimeNs;
}
- // Check if max BufferPeriod exceeds the range of latencies that are going to be displayed on histogram
- public static void setExceedRange() {
- if (mMaxBufferPeriod > (range - 2)) {
- exceedRange = true;
- } else {
- exceedRange = false;
- }
- }
- public static void resetRecord() {
- mPreviousTime = 0;
- mCurrentTime = 0;
- Arrays.fill(mJavaBufferPeriod, 0);
+ /** Reset all variables, called if wants to start a new buffer period's record. */
+ public void resetRecord() {
+ mPreviousTimeNs = 0;
+ mCurrentTimeNs = 0;
+ Arrays.fill(mBufferPeriodTimeStamp, 0);
+ Arrays.fill(mBufferPeriod, 0);
mMaxBufferPeriod = 0;
mCount = 0;
-
}
- public static int[] getBufferPeriodArray() {
- return mJavaBufferPeriod;
-
- }
- public static int getMaxBufferPeriod() {
- return mMaxBufferPeriod;
+ public int[] getBufferPeriodArray() {
+ return mBufferPeriod;
}
-
-
- private static void errorLog(String msg) {
- Log.e("BufferPeriodTracker", msg);
+ public int[] getBufferPeriodTimeStampArray() {
+ return mBufferPeriodTimeStamp;
}
- private static void log(String msg) {
- Log.v("BufferPeriodTracker", msg);
- }
-
- public static class IllegalBufferPeriodException extends Exception {
- public IllegalBufferPeriodException(String message)
- {
- super(message);
- }
+ public int getMaxBufferPeriod() {
+ return mMaxBufferPeriod;
}
+ private static void log(String msg) {
+ Log.v(TAG, msg);
+ }
}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Constant.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Constant.java
new file mode 100644
index 0000000..de84b1f
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Constant.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+
+/**
+ * This file stores constants that are used across multiple files.
+ */
+
+public class Constant {
+ public static final double TWO_PI = 2.0 * Math.PI;
+ public static final long NANOS_PER_MILLI = 1000000;
+ public static final int MILLIS_PER_SECOND = 1000;
+
+ public static final int LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY = 222;
+ public static final int LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD = 223;
+
+ public static final int AUDIO_THREAD_TYPE_JAVA = 0;
+ public static final int AUDIO_THREAD_TYPE_NATIVE = 1;
+
+ public static final int BYTES_PER_SHORT = 2;
+ public static final int SHORTS_PER_INT = 2;
+ public static final int BYTES_PER_FRAME = 2; // bytes per sample
+
+ // prime numbers that don't overlap with FFT frequencies
+ public static final double PRIME_FREQUENCY_1 = 703.0;
+ public static final double PRIME_FREQUENCY_2 = 719.0;
+
+ // amplitude for ToneGeneration
+ public static final double SINE_WAVE_AMPLITUDE = 0.8;
+ public static final double TWO_SINE_WAVES_AMPLITUDE = 0.4;
+
+ // the number used to configured PipeShort/PipeByteBuffer
+ public static final int MAX_SHORTS = 65536;
+
+ // used to identify a variable is currently unknown
+ public static final int UNKNOWN = -1;
+
+ // used when joining a thread
+ public static final int JOIN_WAIT_TIME_MS = 1000;
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Correlation.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Correlation.java
index 062341c..8cb8479 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Correlation.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Correlation.java
@@ -16,22 +16,24 @@
package org.drrickorang.loopback;
-import android.os.Trace;
import android.util.Log;
+
/**
- * Created by rago on 5/8/15.
+ * This class is used to automatically estimate latency and its confidence.
*/
+
public class Correlation {
+ private static final String TAG = "Correlation";
- private int mBlockSize = 4096;
- private int mSamplingRate = 44100;
+ private int mBlockSize = 4096;
+ private int mSamplingRate;
private double [] mDataDownsampled = new double [mBlockSize];
private double [] mDataAutocorrelated = new double[mBlockSize];
public double mEstimatedLatencySamples = 0;
public double mEstimatedLatencyMs = 0;
-
+ public double mEstimatedLatencyConfidence = 0.0;
public void init(int blockSize, int samplingRate) {
@@ -39,8 +41,9 @@ public class Correlation {
mSamplingRate = samplingRate;
}
+
public boolean computeCorrelation(double [] data, int samplingRate) {
- boolean status = false;
+ boolean status;
log("Started Auto Correlation for data with " + data.length + " points");
mSamplingRate = samplingRate;
@@ -57,36 +60,51 @@ public class Correlation {
int maxIndex = -1;
double minLatencyMs = 8; //min latency expected. This algorithm should be improved.
- int minIndex = (int)(0.5 + minLatencyMs * mSamplingRate / (groupSize*1000));
+ int minIndex = (int) (0.5 + minLatencyMs * mSamplingRate / (groupSize * 1000));
+
+ double average = 0;
+ double rms = 0;
//find max
- for(int i=minIndex; i<mDataAutocorrelated.length; i++) {
- if(mDataAutocorrelated[i] > maxValue) {
+ for (int i = minIndex; i < mDataAutocorrelated.length; i++) {
+ average += mDataAutocorrelated[i];
+ rms += mDataAutocorrelated[i] * mDataAutocorrelated[i];
+ if (mDataAutocorrelated[i] > maxValue) {
maxValue = mDataAutocorrelated[i];
maxIndex = i;
}
}
- log(String.format(" Maxvalue %f, max Index : %d/%d (%d) minIndex=%d",maxValue, maxIndex, mDataAutocorrelated.length, data.length, minIndex));
-
+ rms = Math.sqrt(rms / mDataAutocorrelated.length);
+ average = average / mDataAutocorrelated.length;
+ log(String.format(" Maxvalue %f, max Index : %d/%d (%d) minIndex = %d", maxValue, maxIndex,
+ mDataAutocorrelated.length, data.length, minIndex));
+ log(String.format(" average : %.3f rms: %.3f", average, rms));
+ mEstimatedLatencyConfidence = 0.0;
+ if (average > 0) {
+ double factor = 3.0;
- mEstimatedLatencySamples = maxIndex*groupSize;
-
- mEstimatedLatencyMs = mEstimatedLatencySamples *1000/mSamplingRate;
+ double raw = (rms - average) / (factor * average);
+ log(String.format("Raw: %.3f", raw));
+ mEstimatedLatencyConfidence = Math.max(Math.min(raw, 1.0), 0.0);
+ }
+ log(String.format(" ****Confidence: %.2f", mEstimatedLatencyConfidence));
- log(String.format(" latencySamples: %.2f %.2f ms", mEstimatedLatencySamples, mEstimatedLatencyMs));
+ mEstimatedLatencySamples = maxIndex * groupSize;
+ mEstimatedLatencyMs = mEstimatedLatencySamples * 1000 / mSamplingRate;
+ log(String.format(" latencySamples: %.2f %.2f ms", mEstimatedLatencySamples,
+ mEstimatedLatencyMs));
status = true;
-
return status;
}
+
private boolean downsampleData(double [] data, double [] dataDownsampled) {
- boolean status = false;
- // mDataDownsampled = new double[mBlockSize];
- for (int i=0; i<mBlockSize; i++) {
+ boolean status;
+ for (int i = 0; i < mBlockSize; i++) {
dataDownsampled[i] = 0;
}
@@ -95,40 +113,35 @@ public class Correlation {
int currentIndex = 0;
double nextGroup = groupSize;
- //Trace.beginSection("Processing Correlation");
- for (int i = 0; i<N && currentIndex<mBlockSize; i++) {
+ for (int i = 0; i < N && currentIndex < mBlockSize; i++) {
- if(i> nextGroup) { //advanced to next group.
+ if (i > nextGroup) { //advanced to next group.
currentIndex++;
nextGroup += groupSize;
}
- if (currentIndex>=mBlockSize) {
+ if (currentIndex >= mBlockSize) {
break;
}
dataDownsampled[currentIndex] += Math.abs(data[i]);
}
- //Trace.endSection();
-
status = true;
-
return status;
}
+
private boolean autocorrelation(double [] data, double [] dataOut) {
boolean status = false;
double sumsquared = 0;
int N = data.length;
- for(int i=0; i<N; i++) {
+ for (int i = 0; i < N; i++) {
double value = data[i];
- sumsquared += value*value;
+ sumsquared += value * value;
}
- //dataOut = new double[N];
-
- if(sumsquared>0) {
+ if (sumsquared > 0) {
//correlate (not circular correlation)
for (int i = 0; i < N; i++) {
dataOut[i] = 0;
@@ -144,7 +157,8 @@ public class Correlation {
return status;
}
+
private static void log(String msg) {
- Log.v("Recorder", msg);
+ Log.v(TAG, msg);
}
}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/FFT.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/FFT.java
new file mode 100644
index 0000000..e69efb0
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/FFT.java
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+
+/**
+ * This class computes FFT of inputting data.
+ * Note: this part of code is originally from another project, so there's actually multiple copies
+ * of this code. Should somehow merge these copies in the future. Also, no modification on
+ * naming has been made, but naming should be changed once we merge all copies.
+ */
+
+public class FFT {
+ private int m;
+ private double[] cos; // precomputed cosine tables for FFT
+ private double[] sin; // precomputed sine tables for FFT
+ private final int mFFTSamplingSize;
+
+
+ FFT(int FFTSamplingSize) {
+ mFFTSamplingSize = FFTSamplingSize;
+ setUpFFT();
+ }
+
+
+ /** This function is only called in constructor to set up variables needed for computing FFT. */
+ private void setUpFFT() {
+ m = (int) (Math.log(mFFTSamplingSize) / Math.log(2));
+
+ // Make sure n is a power of 2
+ if (mFFTSamplingSize != (1 << m))
+ throw new RuntimeException("FFT sampling size must be power of 2");
+
+ // precomputed tables
+ cos = new double[mFFTSamplingSize / 2];
+ sin = new double[mFFTSamplingSize / 2];
+
+ for (int i = 0; i < mFFTSamplingSize / 2; i++) {
+ cos[i] = Math.cos(-2 * Math.PI * i / mFFTSamplingSize);
+ sin[i] = Math.sin(-2 * Math.PI * i / mFFTSamplingSize);
+ }
+ }
+
+
+ /**
+ * Do FFT, and store the result's real part to "x", imaginary part to "y".
+ */
+ public void fft(double[] x, double[] y, int sign) {
+ int i, j, k, n1, n2, a;
+ double c, s, t1, t2;
+
+ // Bit-reverse
+ j = 0;
+ n2 = mFFTSamplingSize / 2;
+ for (i = 1; i < mFFTSamplingSize - 1; i++) {
+ n1 = n2;
+ while (j >= n1) {
+ j = j - n1;
+ n1 = n1 / 2;
+ }
+ j = j + n1;
+
+ if (i < j) {
+ t1 = x[i];
+ x[i] = x[j];
+ x[j] = t1;
+ t1 = y[i];
+ y[i] = y[j];
+ y[j] = t1;
+ }
+ }
+
+ // FFT
+ n1 = 0;
+ n2 = 1;
+
+ for (i = 0; i < m; i++) {
+ n1 = n2;
+ n2 = n2 + n2;
+ a = 0;
+
+ for (j = 0; j < n1; j++) {
+ c = cos[a];
+ s = sign * sin[a];
+ a += 1 << (m - i - 1);
+
+ for (k = j; k < mFFTSamplingSize; k = k + n2) {
+ t1 = c * x[k + n1] - s * y[k + n1];
+ t2 = s * x[k + n1] + c * y[k + n1];
+ x[k + n1] = x[k] - t1;
+ y[k + n1] = y[k] - t2;
+ x[k] = x[k] + t1;
+ y[k] = y[k] + t2;
+ }
+ }
+ }
+ }
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchDetectionThread.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchDetectionThread.java
new file mode 100644
index 0000000..9ae5a93
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchDetectionThread.java
@@ -0,0 +1,285 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import java.util.Arrays;
+
+import android.util.Log;
+
+
+/**
+ * This thread is responsible for detecting glitches in the samples.
+ */
+
+public class GlitchDetectionThread extends Thread {
+ private static final String TAG = "GlitchDetectionThread";
+ // the acceptable difference between the expected center of mass and what we actually get
+ private static final double mAcceptablePercentDifference = 0.02; // change this if necessary
+
+
+ private boolean mIsRunning; // condition must be true for the thread to run
+ private short mShortBuffer[]; // keep the data read from Pipe
+ private int mShortBufferIndex = 0;
+ private Pipe mPipe;
+ private static int mThreadSleepDurationMs;
+
+ private double mDoubleBuffer[]; // keep the data used for FFT calculation
+ private boolean mIsFirstFFT = true; // whether or not it's the first FFT calculation
+ private double mWaveData[]; // data that will be plotted
+ private int mWaveDataIndex = 0;
+
+ private double mFrequency1;
+ private double mFrequency2; //currently not used
+ private int mSamplingRate;
+ private int mFFTSamplingSize; // amount of samples used to perform a FFT
+ private int mFFTOverlapSamples; // amount of overlapped samples used between two FFTs
+ private int mNewSamplesPerFFT; // amount of new samples (not from last FFT) in a FFT
+ private double mCenterOfMass; // expected center of mass of samples
+ private int[] mGlitches; // for every value = n, n is the nth FFT where a glitch is found
+ private int mGlitchesIndex;
+ private int mFFTCount; // store the current number of FFT performed
+ private FFT mFFT;
+ private boolean mGlitchingIntervalTooLong = false; // true if mGlitches is full
+
+
+ GlitchDetectionThread(double frequency1, double frequency2, int samplingRate,
+ int FFTSamplingSize, int FFTOverlapSamples, int bufferTestDurationInSeconds,
+ int bufferTestWavePlotDurationInSeconds, Pipe pipe) {
+ mPipe = pipe;
+ mFrequency1 = frequency1;
+ mFrequency2 = frequency2;
+ mFFTSamplingSize = FFTSamplingSize;
+ mFFTOverlapSamples = FFTOverlapSamples;
+ mNewSamplesPerFFT = mFFTSamplingSize - mFFTOverlapSamples;
+ mSamplingRate = samplingRate;
+ mIsRunning = true;
+
+ mShortBuffer = new short[mFFTSamplingSize];
+ mDoubleBuffer = new double[mFFTSamplingSize];
+ mWaveData = new double[mSamplingRate * bufferTestWavePlotDurationInSeconds];
+
+ final int acceptableGlitchingIntervalsPerSecond = 10;
+ mGlitches = new int[bufferTestDurationInSeconds * acceptableGlitchingIntervalsPerSecond];
+ Arrays.fill(mGlitches, 0);
+ mGlitchesIndex = 0;
+ mFFTCount = 1;
+
+ mFFT = new FFT(mFFTSamplingSize);
+ computeExpectedCenterOfMass();
+
+ mThreadSleepDurationMs = FFTOverlapSamples * Constant.MILLIS_PER_SECOND / mSamplingRate;
+ if (mThreadSleepDurationMs < 1) {
+ mThreadSleepDurationMs = 1; // sleeps at least 1ms
+ }
+ }
+
+
+ public void run() {
+ while (mIsRunning) {
+ int requiredRead;
+ int actualRead;
+
+ requiredRead = mFFTSamplingSize - mShortBufferIndex;
+ actualRead = mPipe.read(mShortBuffer, mShortBufferIndex, requiredRead);
+
+ if (actualRead > 0) {
+ mShortBufferIndex += actualRead;
+ }
+
+ if (actualRead == Pipe.OVERRUN) {
+ log("There's an overrun");
+ }
+
+ // Once we have enough data, we can do a FFT on it. Note that between two FFTs, part of
+ // the samples (of size mFFTOverlapSamples) are used in both FFTs .
+ if (mShortBufferIndex == mFFTSamplingSize) {
+ bufferShortToDouble(mShortBuffer, mDoubleBuffer);
+
+ // copy data in mDoubleBuffer to mWaveData
+ if (mIsFirstFFT) {
+ // if it's the first FFT, copy the whole "mNativeBuffer" to mWaveData
+ System.arraycopy(mDoubleBuffer, 0, mWaveData,
+ mWaveDataIndex, mFFTSamplingSize);
+ mWaveDataIndex += mFFTSamplingSize;
+ mIsFirstFFT = false;
+ } else {
+ // if mWaveData is all filled, clear it then starting writing from beginning.
+ //TODO make mWaveData into a circular buffer storing the last N seconds instead
+ if ((mWaveDataIndex + mNewSamplesPerFFT) >= mWaveData.length) {
+ Arrays.fill(mWaveData, 0);
+ mWaveDataIndex = 0;
+ }
+
+ // if it's not the first FFT, copy the new data in "mNativeBuffer" to mWaveData
+ System.arraycopy(mDoubleBuffer, mFFTOverlapSamples, mWaveData,
+ mWaveDataIndex, mNewSamplesPerFFT);
+ mWaveDataIndex += mFFTOverlapSamples;
+ }
+
+ detectGlitches();
+ // move new samples to the beginning of the array as they will be reused in next fft
+ System.arraycopy(mShortBuffer, mNewSamplesPerFFT, mShortBuffer,
+ 0, mFFTOverlapSamples);
+ mShortBufferIndex = mFFTOverlapSamples;
+ } else {
+ try {
+ sleep(mThreadSleepDurationMs);
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+
+ }
+
+
+ /** convert samples in shortBuffer to double, then copy into doubleBuffer. */
+ private void bufferShortToDouble(short[] shortBuffer, double[] doubleBuffer) {
+ double temp;
+ for (int i = 0; i < shortBuffer.length; i++) {
+ temp = (double) shortBuffer[i];
+ temp *= (1.0 / Short.MAX_VALUE);
+ doubleBuffer[i] = temp;
+ }
+ }
+
+
+ /** Should be called by other thread to stop this thread */
+ public void requestStop() {
+ mIsRunning = false;
+ interrupt();
+ }
+
+
+ /**
+ * Use the data in mDoubleBuffer to do glitch detection since we know what
+ * data we are expecting.
+ */
+ private void detectGlitches() {
+ double centerOfMass;
+ double[] fftResult;
+ double[] currentSamples;
+
+ currentSamples = Arrays.copyOfRange(mDoubleBuffer, 0, mDoubleBuffer.length);
+ currentSamples = Utilities.hanningWindow(currentSamples);
+ double width = (double) mSamplingRate / currentSamples.length;
+ fftResult = computeFFT(currentSamples); // gives an array of sampleSize / 2
+ final double threshold = 0.1;
+
+ // for all elements in the FFT result that are smaller than threshold,
+ // eliminate them as they are probably noise
+ for (int j = 0; j < fftResult.length; j++) {
+ if (fftResult[j] < threshold) {
+ fftResult[j] = 0;
+ }
+ }
+
+ // calculate the center of mass of sample's FFT
+ centerOfMass = computeCenterOfMass(fftResult, width);
+ double difference = (Math.abs(centerOfMass - mCenterOfMass) / mCenterOfMass);
+ if (mGlitchesIndex >= mGlitches.length) {
+ // we just want to show this log once and set the flag once.
+ if (!mGlitchingIntervalTooLong) {
+ log("Not enough room to store glitches!");
+ mGlitchingIntervalTooLong = true;
+ }
+ } else {
+ // centerOfMass == -1 if the wave we get is silence.
+ if (difference > mAcceptablePercentDifference || centerOfMass == -1) {
+ mGlitches[mGlitchesIndex] = mFFTCount;
+ mGlitchesIndex++;
+ }
+ }
+ mFFTCount++;
+ }
+
+
+ /** Compute the center of mass of fftResults. Width is the width of each beam. */
+ private double computeCenterOfMass(double[] fftResult, double width) {
+ int length = fftResult.length;
+ double weightedSum = 0;
+ double totalWeight = 0;
+ for (int i = 0; i < length; i++) {
+ weightedSum += fftResult[i] * i;
+ totalWeight += fftResult[i];
+ }
+
+ // this may happen since we are eliminating the noises. So if the wave we got is silence,
+ // totalWeight might == 0.
+ if (totalWeight == 0) {
+ return -1;
+ }
+
+ return (weightedSum * width) / totalWeight;
+ }
+
+
+ /** Compute FFT of a set of data "samples". */
+ private double[] computeFFT(double[] realArray) {
+ int length = realArray.length;
+ double[] imagArray = new double[length]; // all zeros
+ Arrays.fill(imagArray, 0);
+ mFFT.fft(realArray, imagArray, 1); // here realArray and imagArray get set
+
+ double[] absValue = new double[length / 2]; // don't use second portion of arrays
+
+ for (int i = 0; i < (length / 2); i++) {
+ absValue[i] = Math.sqrt(realArray[i] * realArray[i] + imagArray[i] * imagArray[i]);
+ }
+
+ return absValue;
+ }
+
+
+ /** Compute the center of mass if the samples have no glitches. */
+ private void computeExpectedCenterOfMass() {
+ SineWaveTone sineWaveTone = new SineWaveTone(mSamplingRate, mFrequency1);
+ double[] sineWave = new double[mFFTSamplingSize];
+ double centerOfMass;
+ double[] sineFFTResult;
+
+ sineWaveTone.generateTone(sineWave, mFFTSamplingSize);
+ sineWave = Utilities.hanningWindow(sineWave);
+ double width = (double) mSamplingRate / sineWave.length;
+
+ sineFFTResult = computeFFT(sineWave); // gives an array of sample sizes / 2
+ centerOfMass = computeCenterOfMass(sineFFTResult, width); // return center of mass
+ mCenterOfMass = centerOfMass;
+ log("the expected center of mass:" + Double.toString(mCenterOfMass));
+ }
+
+
+ public double[] getWaveData() {
+ return mWaveData;
+ }
+
+
+ public boolean getGlitchingIntervalTooLong() {
+ return mGlitchingIntervalTooLong;
+ }
+
+
+ public int[] getGlitches() {
+ return mGlitches;
+ }
+
+
+ private static void log(String msg) {
+ Log.v(TAG, msg);
+ }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchesActivity.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchesActivity.java
new file mode 100644
index 0000000..0c31289
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchesActivity.java
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.app.Activity;
+import android.os.Bundle;
+import android.util.Log;
+import android.view.View;
+import android.widget.TextView;
+
+
+/**
+ * This activity shows a list of time intervals where a glitch occurs.
+ */
+
+public class GlitchesActivity extends Activity {
+ private static final String TAG = "GlitchesActivity";
+
+
+ public void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+
+ View view = getLayoutInflater().inflate(R.layout.glitches_activity, null);
+ setContentView(view);
+
+ Bundle bundle = getIntent().getExtras();
+ int FFTSamplingSize = bundle.getInt("FFTSamplingSize");
+ int FFTOverlapSamples = bundle.getInt("FFTOverlapSamples");
+ int[] glitchesData = bundle.getIntArray("glitchesArray");
+ int samplingRate = bundle.getInt("samplingRate");
+ boolean glitchingIntervalTooLong = bundle.getBoolean("glitchingIntervalTooLong");
+ int newSamplesPerFFT = FFTSamplingSize - FFTOverlapSamples;
+ int numberOfGlitches = bundle.getInt("numberOfGlitches");
+
+ // the time span of new samples for a single FFT in ms
+ double newSamplesInMs = ((double) newSamplesPerFFT / samplingRate) *
+ Constant.MILLIS_PER_SECOND;
+ log("newSamplesInMs: " + Double.toString(newSamplesInMs));
+
+ // the time span of all samples for a single FFT in ms
+ double allSamplesInMs = ((double) FFTSamplingSize / samplingRate) *
+ Constant.MILLIS_PER_SECOND;
+ log("allSamplesInMs: " + Double.toString(allSamplesInMs));
+
+ StringBuilder listOfGlitches = new StringBuilder();
+ listOfGlitches.append("Total Glitching Interval too long: " +
+ glitchingIntervalTooLong + "\n");
+ listOfGlitches.append("Estimated number of glitches: " + numberOfGlitches + "\n");
+ listOfGlitches.append("List of glitching intervals: \n");
+
+ int timeInMs; // starting time of glitches
+ for (int i = 0; i < glitchesData.length; i++) {
+ //log("glitchesData" + i + " :" + glitchesData[i]);
+ if (glitchesData[i] > 0) {
+ //append the time of glitches to "listOfGlitches"
+ timeInMs = (int) ((glitchesData[i] - 1) * newSamplesInMs); // round down
+ listOfGlitches.append(Integer.toString(timeInMs) + "~" +
+ Integer.toString(timeInMs + (int) allSamplesInMs) + "ms\n");
+ }
+ }
+
+
+
+ // Set the textView
+ TextView textView = (TextView) findViewById(R.id.GlitchesInfo);
+ textView.setTextSize(12);
+ textView.setText(listOfGlitches.toString());
+ }
+
+
+ private static void log(String msg) {
+ Log.v(TAG, msg);
+ }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/HistogramView.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/HistogramView.java
index 570a539..4c99b39 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/HistogramView.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/HistogramView.java
@@ -22,41 +22,57 @@ import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.util.AttributeSet;
+import android.util.Log;
import android.view.View;
+
/**
- * Created by ninatai on 5/14/15.
+ * This is the histogram used to show recorder/player buffer period.
*/
+
public class HistogramView extends View {
+ private static final String TAG = "HistogramView";
+
+
private Paint mHistPaint;
private Paint mTextPaint;
private Paint mLinePaint;
private Paint mXLabelPaint;
- private static int[] mData;
- private static int mMaxBufferPeriod = 0;
- private static boolean mExceedRange = false;
- private int mBase = 10; //base of logarithm
- private int mNumberOfXLabel = 4;
- private int mYLabelSize = 30;
- private int mXLabelSize = 22;
- private int mLineWidth = 3;
- private int mHistogramInterval = 2; // separate each beam in the histogram by such amount
- int mExtraYMargin = 5; // the extra margin between y labels and y-axis
+ private int[] mTimeStampData;
+ private int[] mDisplayTimeStampData;
+
+ private int[] mData; // data for buffer period
+ private int[] mDisplayData; // modified data that is used to draw histogram
+ private int mMaxBufferPeriod = 0;
+ // number of x-axis labels excluding the last x-axis label
+ private int mNumberOfXLabel = 5; // mNumberOfXLabel must > 0
+
+ private final int mYAxisBase = 10; // base of y-axis log scale
+ private final int mYLabelSize = 40;
+ private final int mXLabelSize = 40;
+ private final int mLineWidth = 3;
+ private final int mMaxNumberOfBeams = 202; // the max amount of beams to display on the screen
+
+ // Note: if want to change this to base other than 10, must change the way x labels are
+ // displayed. It's currently half-hardcoded.
+ private final int mBucketBase = 10; // a bucket's range
+
public HistogramView(Context context, AttributeSet attrs) {
super(context, attrs);
- init();
+ initPaints();
}
- // initiate once for optimization
- private void init() {
+
+ /** Initiate all the Paint objects. */
+ private void initPaints() {
mHistPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
mHistPaint.setStyle(Paint.Style.FILL);
mHistPaint.setColor(Color.BLUE);
mTextPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
- mTextPaint.setColor(Color.RED);
+ mTextPaint.setColor(Color.BLACK);
mTextPaint.setTextSize(mYLabelSize);
mXLabelPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
@@ -68,41 +84,126 @@ public class HistogramView extends View {
mLinePaint.setStrokeWidth(mLineWidth);
}
+
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
-
canvas.drawColor(Color.GRAY);
- int arrayLength = mData.length;
- if (mData == null || arrayLength == 0) {
+
+ if (mData == null || mData.length == 0) {
return;
}
+ int arrayLength = mData.length;
+ boolean exceedBufferPeriodRange;
if (mMaxBufferPeriod != 0) {
-
- // the range of latencies that's going to be displayed on histogram
- int range;
+ final int extraYMargin = 5; // the extra margin between y labels and y-axis
+ final int beamInterval = 2; // separate each beam in the histogram by such amount
+ int range; // the number of beams that's going to be displayed on histogram
if (mMaxBufferPeriod > arrayLength - 1) {
range = arrayLength;
- mExceedRange = true;
+ exceedBufferPeriodRange = true;
} else {
range = mMaxBufferPeriod + 1;
- mExceedRange = false;
+ exceedBufferPeriodRange = false;
}
if (range == 0) {
return;
}
- // coordinate starts at (0,0), up to (right, bottom)
+ boolean isUsingDisplayData = false;
+ int oldRange = range;
+ int interval = 1;
+
+ // if there are more beams than allowed to be displayed on screen,
+ // put beams into buckets
+ if (range > mMaxNumberOfBeams) {
+ isUsingDisplayData = true;
+ int bucketOrder = 0;
+ if (exceedBufferPeriodRange) { // there should be one extra beam for 101+
+ range -= 2;
+ while (range > mMaxNumberOfBeams - 2) {
+ range /= mBucketBase;
+ bucketOrder++;
+ }
+ range += 2; // assuming always XXX1+, not something like 0~473, 474+.
+
+ } else {
+ range--;
+ int temp = range;
+ while (range > mMaxNumberOfBeams - 2) {
+ range /= mBucketBase;
+ bucketOrder++;
+ }
+
+ if ((temp % mBucketBase) != 0) {
+ range += 2;
+ } else {
+ range++;
+ }
+ }
+
+ interval = (int) Math.pow(mBucketBase, bucketOrder);
+ mDisplayData = new int[mMaxNumberOfBeams];
+ mDisplayData[0] = mData[0];
+
+ // putting data into buckets.
+ for (int i = 1; i < (range - 1); i++) {
+ for (int j = (((i - 1) * interval) + 1); (j <= (i * interval)); j++) {
+ mDisplayData[i] += mData[j];
+ }
+ }
+
+ if (exceedBufferPeriodRange) {
+ mDisplayData[range - 1] = mData[oldRange - 1];
+ } else {
+ for (int i = (((range - 2) * interval) + 1); i < oldRange; i++) {
+ mDisplayData[range - 1] += mData[i];
+ }
+ }
+
+ // for timestamp
+ if (mTimeStampData != null) {
+ mDisplayTimeStampData = new int[mMaxNumberOfBeams];
+ mDisplayTimeStampData[0] = mTimeStampData[0];
+ // find the max timestamp of each bucket.
+ for (int i = 1; i < (range - 1); i++) {
+ int maxTimeStamp = mTimeStampData[(((i - 1) * interval) + 1)];
+ for (int j = (((i - 1) * interval) + 1); (j <= (i * interval)); j++) {
+ if (mTimeStampData[j] > maxTimeStamp)
+ maxTimeStamp = mTimeStampData[j];
+ }
+ mDisplayTimeStampData[i] = maxTimeStamp;
+ }
+ if (exceedBufferPeriodRange) {
+ mDisplayTimeStampData[range - 1] = mTimeStampData[oldRange - 1];
+ } else {
+ int maxTimeStamp = mTimeStampData[(((range - 2) * interval) + 1)];
+ for (int i = (((range - 2) * interval) + 1); i < oldRange; i++) {
+ if (mTimeStampData[i] > maxTimeStamp) {
+ maxTimeStamp = mTimeStampData[i];
+ }
+ }
+ mDisplayTimeStampData[range - 1] = maxTimeStamp;
+ }
+ }
+
+ } else {
+ mDisplayData = mData;
+ mDisplayTimeStampData = mTimeStampData;
+ }
+
+
+ // coordinate starts at (0, 0), up to (right, bottom)
int right = this.getRight();
int bottom = this.getBottom();
// calculate the max frequency among all latencies
int maxBufferPeriodFreq = 0;
- for (int i = 1; i < arrayLength; i++) {
- if (mData[i] > maxBufferPeriodFreq) {
- maxBufferPeriodFreq = mData[i];
+ for (int i = 1; i < range; i++) {
+ if (mDisplayData[i] > maxBufferPeriodFreq) {
+ maxBufferPeriodFreq = mDisplayData[i];
}
}
@@ -110,112 +211,197 @@ public class HistogramView extends View {
return;
}
- // find the closest order of "mBase" according to maxBufferPeriodFreq
- int order = 0;
- while (Math.pow(mBase, order) < maxBufferPeriodFreq) {
- order += 1;
- }
- float height =( (float) (bottom - mXLabelSize - mLineWidth) / (order + 1)); // height for one decade
-
+ // find the closest order of "mYAxisBase" according to maxBufferPeriodFreq
+ int order = (int) Math.ceil((Math.log10(maxBufferPeriodFreq)) /
+ (Math.log10(mYAxisBase)));
+ float height = ((float) (bottom - mXLabelSize - mLineWidth) / (order + 1));
// y labels
- String[] yLabels = new String[order+2]; // will store {"0", "1", "10", "100", ...} for base = 10
+ String[] yLabels = new String[order + 2]; // store {"0", "1", "10", ...} for base = 10
yLabels[0] = "0";
int yStartPoint = bottom - mXLabelSize - mLineWidth;
canvas.drawText(yLabels[0], 0, yStartPoint, mTextPaint);
int currentValue = 1;
- for (int i = 1; i <= (order + 1); i++)
- {
+ for (int i = 1; i < yLabels.length; i++) {
yLabels[i] = Integer.toString(currentValue);
- // Label is displayed at a height that's lower than it should be by the amount of "mYLabelSize"
- canvas.drawText(yLabels[i], 0, yStartPoint - (i * height) + mYLabelSize, mTextPaint);
- currentValue *= mBase;
-
+ // Label is displayed at lower than it should be by the amount of "mYLabelSize"
+ canvas.drawText(yLabels[i], 0, yStartPoint - (i * height) + mYLabelSize,
+ mTextPaint);
+ currentValue *= mYAxisBase;
}
-
// draw x axis
canvas.drawLine(0, bottom - mXLabelSize, right, bottom - mXLabelSize, mLinePaint);
// draw y axis
- int yMargin = getTextWidth(yLabels[order+1], mTextPaint);
- canvas.drawLine(yMargin + mExtraYMargin, bottom, yMargin + mExtraYMargin, 0, mLinePaint);
+ int yMargin = getTextWidth(yLabels[order + 1], mTextPaint);
+ canvas.drawLine(yMargin + extraYMargin, bottom, yMargin + extraYMargin,
+ 0, mLinePaint);
// width of each beam in the histogram
- float width = ((float) (right - yMargin - mExtraYMargin - mLineWidth - range * mHistogramInterval) / range);
+ float width = ((float) (right - yMargin - extraYMargin - mLineWidth -
+ (range * beamInterval)) / range);
// draw x labels
- String[] xLabels = new String[mNumberOfXLabel];
- int xLabelInterval = (range - 2) / mNumberOfXLabel;
- xLabels[0] = "0"; // first label is for 0
- canvas.drawText(xLabels[0], yMargin - getTextWidth(xLabels[0], mXLabelPaint), bottom, mXLabelPaint);
-
- int xStartPoint = yMargin + mExtraYMargin + mLineWidth; // position where first beam is placed on x-axis
- for (int i = 1; i < mNumberOfXLabel; i++) {
- xLabels[i] = Integer.toString(i * xLabelInterval);
- canvas.drawText(xLabels[i], xStartPoint + (xLabelInterval * i * (width + mHistogramInterval)), bottom, mXLabelPaint);
+ String lastXLabel;
+ int xLabelInterval;
+ int xStartPoint = yMargin + extraYMargin + mLineWidth; // position of first beam
+ String[] xLabels;
+
+ // mNumberOfXLabel includes "0" but excludes the last label, which will be at last beam
+ // if mNumberOfXLabel exceeds the total beams that's going to have, reduce its value
+ if (mNumberOfXLabel - 1 > range - 2) {
+ mNumberOfXLabel = range - 1;
}
- String lastXLabel; // last label is for the last beam
- if (mExceedRange) {
- lastXLabel = Integer.toString(range - 1) + "+";
- } else {
- lastXLabel = Integer.toString(range - 1);
- }
- canvas.drawText(lastXLabel, right - getTextWidth(lastXLabel, mXLabelPaint) - 1, bottom, mXLabelPaint);
+ //
+ if (!isUsingDisplayData) { // in this case each beam represent one buffer period
+ if ((range - 2) < mNumberOfXLabel) {
+ xLabelInterval = 1;
+ } else {
+ xLabelInterval = (range - 2) / mNumberOfXLabel;
+ }
+
+ xLabels = new String[mNumberOfXLabel];
+ xLabels[0] = "0"; // first label is for 0
+ canvas.drawText(xLabels[0], yMargin + extraYMargin + mLineWidth, bottom,
+ mXLabelPaint);
+
+ float xLabelLineStartX;
+ float xLabelLineStartY;
+ int xLabelLineLength = 10;
+ for (int i = 1; i < mNumberOfXLabel; i++) {
+ xLabelLineStartX = xStartPoint +
+ (xLabelInterval * i * (width + beamInterval));
+ xLabels[i] = Integer.toString(i * xLabelInterval);
+ canvas.drawText(xLabels[i], xLabelLineStartX, bottom, mXLabelPaint);
+
+ //add a vertical line to indicate label's corresponding beams
+ xLabelLineStartY = bottom - mXLabelSize;
+ canvas.drawLine(xLabelLineStartX, xLabelLineStartY, xLabelLineStartX,
+ xLabelLineStartY - xLabelLineLength, mLinePaint);
+ }
+
+ // last label is for the last beam
+ if (exceedBufferPeriodRange) {
+ lastXLabel = Integer.toString(range - 1) + "+";
+ } else {
+ lastXLabel = Integer.toString(range - 1);
+ }
+ canvas.drawText(lastXLabel, right - getTextWidth(lastXLabel, mXLabelPaint) - 1,
+ bottom, mXLabelPaint);
+
+ } else { // in this case each beam represent a range of buffer period
+ // if mNumberOfXLabel exceeds amount of beams, decrease mNumberOfXLabel
+ if ((range - 2) < mNumberOfXLabel) {
+ xLabelInterval = 1;
+ } else {
+ xLabelInterval = (range - 2) / mNumberOfXLabel;
+ }
+
+ xLabels = new String[mNumberOfXLabel];
+ xLabels[0] = "0"; // first label is for 0ms
+ canvas.drawText(xLabels[0], yMargin + extraYMargin + mLineWidth, bottom,
+ mXLabelPaint);
+
+ // draw all the middle labels
+ for (int i = 1; i < mNumberOfXLabel; i++) {
+ xLabels[i] = Integer.toString((i * xLabelInterval) - 1) + "1-" +
+ Integer.toString(i * xLabelInterval) + "0";
+ canvas.drawText(xLabels[i], xStartPoint + (xLabelInterval * i *
+ (width + beamInterval)), bottom, mXLabelPaint);
+ }
+
+ // draw the last label for the last beam
+ if (exceedBufferPeriodRange) {
+ lastXLabel = Integer.toString(oldRange - 1) + "+";
+ } else {
+ if ((((range - 2) * interval) + 1) == oldRange - 1) {
+ lastXLabel = Integer.toString(oldRange - 1);
+ } else {
+ lastXLabel = Integer.toString(range - 2) + "1-" +
+ Integer.toString(oldRange - 1);
+ }
+ }
+
+ canvas.drawText(lastXLabel, right - getTextWidth(lastXLabel, mXLabelPaint) - 1,
+ bottom, mXLabelPaint);
+ }
// draw the histogram
- float currentLeft = yMargin + mExtraYMargin + mLineWidth; // FIXME there's an extra 1 pixel split, not sure why
+ float currentLeft = yMargin + extraYMargin + mLineWidth;
float currentTop;
float currentRight;
int currentBottom = bottom - mXLabelSize - mLineWidth;
-
for (int i = 0; i < range; i++) {
currentRight = currentLeft + width;
-
- // calculate the height of the beam
- if (mData[i] == 0) {
- currentTop = currentBottom;
- } else {
- float units = (float) ((Math.log10((double) mData[i])) + 1.0); // FIXME change it to have "mBase" as the baset
+ // calculate the height of the beam. Skip drawing if mDisplayData[i] = 0
+ if (mDisplayData[i] != 0) {
+ float units = (float) (((Math.log10((double) mDisplayData[i])) /
+ Math.log10(mYAxisBase)) + 1.0);
currentTop = currentBottom - (height * units);
+ canvas.drawRect(currentLeft, currentTop, currentRight,
+ currentBottom, mHistPaint);
}
- canvas.drawRect(currentLeft, currentTop, currentRight, currentBottom, mHistPaint);
- currentLeft = currentRight + mHistogramInterval;
+ currentLeft = currentRight + beamInterval;
}
}
-
}
- // get the width of a certain string, using a certain paint
+ /** get the width of "text" when using "paint". */
public int getTextWidth(String text, Paint paint) {
+ int width;
Rect bounds = new Rect();
paint.getTextBounds(text, 0, text.length(), bounds);
- int width = bounds.left + bounds.width();
+ width = bounds.left + bounds.width();
return width;
}
- void redraw() {
- invalidate();
+
+ /** Copy timestamp data into "mTimeStampData" */
+ public void setBufferPeriodTimeStampArray(int[] timeStamp) {
+ if (timeStamp == null) {
+ return;
+ }
+
+ if (mTimeStampData == null || timeStamp.length != mTimeStampData.length) {
+ mTimeStampData = new int[timeStamp.length];
+ }
+
+ System.arraycopy(timeStamp, 0, mTimeStampData, 0, timeStamp.length);
+ }
+
+
+ public int[] getBufferPeriodDisplayTimeStampArray() {
+ return mDisplayTimeStampData;
}
- // Copy data into internal buffer
- public static void setBufferPeriodArray(int[] pData) {
- if (mData == null || pData.length != mData.length) {
- mData = new int[pData.length];
+ /** Copy buffer period data into "mData" */
+ public void setBufferPeriodArray(int[] data) {
+ if (data == null) {
+ return;
+ }
+
+ if (mData == null || data.length != mData.length) {
+ mData = new int[data.length];
}
- System.arraycopy(pData, 0, mData, 0, pData.length);
- // postInvalidate();
+
+ System.arraycopy(data, 0, mData, 0, data.length);
}
- public static void setMaxBufferPeriod(int BufferPeriod) {
- mMaxBufferPeriod = BufferPeriod;
+
+ public void setMaxBufferPeriod(int ReadBufferPeriod) {
+ mMaxBufferPeriod = ReadBufferPeriod;
}
-}
+ private static void log(String msg) {
+ Log.v(TAG, msg);
+ }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/BufferPeriodActivity.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoadThread.java
index 4353783..00b13ba 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/BufferPeriodActivity.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoadThread.java
@@ -16,29 +16,38 @@
package org.drrickorang.loopback;
-import android.app.Activity;
-import android.os.Bundle;
-import android.view.View;
-import android.widget.TextView;
+import android.util.Log;
+
/**
- * Created by ninatai on 5/13/15.
+ * This thread is used to add load to CPU, in order to test performance of audio under load.
*/
-public class BufferPeriodActivity extends Activity {
- private HistogramView mHistogramView;
- private TextView mTextView;
+public class LoadThread extends Thread {
+ private static final String TAG = "LoadThread";
+
+ private volatile boolean mIsRunning;
+
- public void onCreate(Bundle savedInstanceState) {
+ public void run() {
+ log("Entering load thread");
+ long count = 0;
+ mIsRunning = true;
+ while(mIsRunning) {
+ count++;
+ }
- super.onCreate(savedInstanceState);
+ log("exiting CPU load thread with count = " + count);
+ }
- View view = getLayoutInflater().inflate(R.layout.buffer_period_activity, null);
- setContentView(view);
- mTextView = (TextView) findViewById(R.id.histogramInfo);
- mHistogramView = (HistogramView) findViewById(R.id.viewHistogram);
+ public void requestStop() {
+ mIsRunning = false;
+ }
+ private static void log(String msg) {
+ Log.v(TAG, msg);
}
+
}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackActivity.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackActivity.java
index 0f3bc37..437db6e 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackActivity.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackActivity.java
@@ -16,137 +16,313 @@
package org.drrickorang.loopback;
+import java.io.File;
+import java.io.FileDescriptor;
+import java.io.FileOutputStream;
+import java.util.Arrays;
+
import android.app.Activity;
+import android.content.ComponentName;
+import android.content.Context;
import android.content.Intent;
-import android.graphics.Bitmap;
-import android.net.Uri;
+import android.content.ServiceConnection;
import android.database.Cursor;
-import android.provider.MediaStore;
-import android.os.ParcelFileDescriptor;
-
-
-import java.io.FileDescriptor;
-
+import android.graphics.Bitmap;
import android.media.AudioManager;
-//import android.media.MediaPlayer;
+import android.net.Uri;
import android.os.Bundle;
+import android.os.Build;
+import android.os.Handler;
+import android.os.IBinder;
+import android.os.Message;
+import android.os.ParcelFileDescriptor;
+import android.provider.MediaStore;
import android.text.format.DateFormat;
import android.util.Log;
import android.view.Gravity;
import android.view.View;
-import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.SeekBar;
import android.widget.Toast;
import android.widget.TextView;
-import android.content.Context;
-import android.os.Handler;
-import android.os.Message;
-import java.io.FileOutputStream;
-import java.io.File;
-import android.os.Build;
+/**
+ * This is the main activity of the Loopback app. Two tests (latency test and buffer test) can be
+ * initiated here. Note: buffer test and glitch detection is the same test, it's just that this test
+ * has two parts of result.
+ */
public class LoopbackActivity extends Activity {
- /**
- * Member Vars
- */
-
- public final static String SETTINGS_OBJECT = "org.drrickorang.loopback.SETTINGS_OBJECT";
+ private static final String TAG = "LoopbackActivity";
private static final int SAVE_TO_WAVE_REQUEST = 42;
private static final int SAVE_TO_PNG_REQUEST = 43;
-
- private static final int SETTINGS_ACTIVITY_REQUEST_CODE = 44;
- private static final int ABOUT_ACTIVITY_REQUEST_CODE = 45;
- LoopbackAudioThread audioThread = null;
- NativeAudioThread nativeAudioThread = null;
+ private static final int SAVE_TO_TXT_REQUEST = 44;
+ private static final int SAVE_RECORDER_BUFFER_PERIOD_TO_TXT_REQUEST = 45;
+ private static final int SAVE_PLAYER_BUFFER_PERIOD_TO_TXT_REQUEST = 46;
+ private static final int SETTINGS_ACTIVITY_REQUEST_CODE = 54;
+ private static final int THREAD_SLEEP_DURATION_MS = 200;
+
+ LoopbackAudioThread mAudioThread = null;
+ NativeAudioThread mNativeAudioThread = null;
private WavePlotView mWavePlotView;
- private String mCurrentTime = "IncorrectTime"; // The time the plot is acquired
- private String mFilePathWav;
-
-
- SeekBar mBarMasterLevel; //drag the volumn
- TextView mTextInfo;
- TextView mTextViewCurrentLevel;
- TextView mTextViewEstimatedLatency;
- private double [] mWaveData;
- private Correlation mCorrelation = new Correlation();
- int mSamplingRate;
+ private String mCurrentTime = "IncorrectTime"; // The time the plot is acquired
+ private String mWaveFilePath; // path of the wave file
+
+ private SeekBar mBarMasterLevel; // drag the volume
+ private TextView mTextInfo;
+ private TextView mTextViewCurrentLevel;
+ private TextView mTextViewEstimatedLatency;
+ private Toast mToast;
+
+ private int mTestType;
+ private double [] mWaveData; // this is where we store the data for the wave plot
+ private Correlation mCorrelation = new Correlation();
+ private BufferPeriod mRecorderBufferPeriod = new BufferPeriod();
+ private BufferPeriod mPlayerBufferPeriod = new BufferPeriod();
+
+ // for native buffer period
+ private int[] mNativeRecorderBufferPeriodArray;
+ private int mNativeRecorderMaxBufferPeriod;
+ private int[] mNativePlayerBufferPeriodArray;
+ private int mNativePlayerMaxBufferPeriod;
+
+ private static final String INTENT_SAMPLING_FREQUENCY = "SF";
+ private static final String INTENT_FILENAME = "FileName";
+ private static final String INTENT_RECORDER_BUFFER = "RecorderBuffer";
+ private static final String INTENT_PLAYER_BUFFER = "PlayerBuffer";
+ private static final String INTENT_AUDIO_THREAD = "AudioThread";
+ private static final String INTENT_MIC_SOURCE = "MicSource";
+ private static final String INTENT_AUDIO_LEVEL = "AudioLevel";
+ private static final String INTENT_TEST_TYPE = "TestType";
+ private static final String INTENT_BUFFER_TEST_DURATION = "BufferTestDuration";
+
+ // for running the test using adb command
+ private boolean mIntentRunning = false; // if it is running triggered by intent with parameters
+ private String mIntentFileName;
+ private int mIntentSamplingRate = 0;
+ private int mIntentPlayerBuffer = 0;
+ private int mIntentRecorderBuffer = 0;
+ private int mIntentMicSource = -1;
+ private int mIntentAudioThread = -1;
+ private int mIntentAudioLevel = -1;
+ private int mIntentTestType = -1;
+ private int mIntentBufferTestDuration = 0; // in second
+
+ // Note: these four values should only be assigned in restartAudioSystem()
+ private int mAudioThreadType = Constant.UNKNOWN;
+ private int mSamplingRate;
+ private int mPlayerBufferSizeInBytes;
+ private int mRecorderBufferSizeInBytes;
+
+ // for buffer test
+ private int[] mGlitchesData;
+ private boolean mGlitchingIntervalTooLong;
+ private int mFFTSamplingSize;
+ private int mFFTOverlapSamples;
+ private int mBufferTestDuration; //in second
+
+ // threads that load CPUs
+ private static final int mNumberOfLoadThreads = 4;
+ private LoadThread[] mLoadThreads;
+
+ // for getting the Service
+ boolean mBound = false;
+ private AudioTestService mAudioTestService;
+ private final ServiceConnection mServiceConnection = new ServiceConnection() {
+ public void onServiceConnected(ComponentName className, IBinder service) {
+ mAudioTestService = ((AudioTestService.AudioTestBinder) service).getService();
+ mBound = true;
+ }
- Toast toast;
+ public void onServiceDisconnected(ComponentName className) {
+ mAudioTestService = null;
+ mBound = false;
+ }
+ };
private Handler mMessageHandler = new Handler() {
public void handleMessage(Message msg) {
super.handleMessage(msg);
- switch(msg.what) {
- case LoopbackAudioThread.FUN_PLUG_AUDIO_THREAD_MESSAGE_REC_STARTED:
- log("got message java rec started!!");
- showToast("Java Recording Started");
- refreshState();
- break;
- case LoopbackAudioThread.FUN_PLUG_AUDIO_THREAD_MESSAGE_REC_ERROR:
- log("got message java rec can't start!!");
- showToast("Java Recording Error. Please try again");
+ switch (msg.what) {
+ case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_STARTED:
+ log("got message java latency test started!!");
+ showToast("Java Latency Test Started");
+ resetResults();
+ refreshState();
+ refreshPlots();
+ break;
+ case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_ERROR:
+ log("got message java latency test rec can't start!!");
+ showToast("Java Latency Test Recording Error. Please try again");
+ refreshState();
+ stopAudioTestThreads();
+ mIntentRunning = false;
+ refreshSoundLevelBar();
+ break;
+ case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_STOP:
+ case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_COMPLETE:
+ if (mAudioThread != null) {
+ mWaveData = mAudioThread.getWaveData();
+ mCorrelation.computeCorrelation(mWaveData, mSamplingRate);
+ log("got message java latency rec complete!!");
+ refreshPlots();
refreshState();
- stopAudioThread();
- break;
- case LoopbackAudioThread.FUN_PLUG_AUDIO_THREAD_MESSAGE_REC_COMPLETE:
- if(audioThread != null) {
- mWaveData = audioThread.getWaveData();
- mSamplingRate = audioThread.mSamplingRate;
- mCorrelation.computeCorrelation(mWaveData,mSamplingRate);
- log("got message java rec complete!!");
- refreshPlots();
- refreshState();
- mCurrentTime = (String) DateFormat.format("MMddkkmmss", System.currentTimeMillis());
- showToast("Java Recording Completed");
- stopAudioThread();
+ mCurrentTime = (String) DateFormat.format("MMddkkmmss",
+ System.currentTimeMillis());
+ mBufferTestDuration = mAudioThread.getDurationInSeconds();
+
+ switch (msg.what) {
+ case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_STOP:
+ showToast("Java Latency Test Stopped");
+ break;
+ case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_COMPLETE:
+ showToast("Java Latency Test Completed");
+ break;
}
- break;
- case NativeAudioThread.FUN_PLUG_NATIVE_AUDIO_THREAD_MESSAGE_REC_STARTED:
- log("got message native rec started!!");
- showToast("Native Recording Started");
- refreshState();
- break;
- case NativeAudioThread.FUN_PLUG_NATIVE_AUDIO_THREAD_MESSAGE_REC_ERROR:
- log("got message native rec can't start!!");
- showToast("Native Recording Error. Please try again");
+
+ stopAudioTestThreads();
+ if (mIntentRunning && mIntentFileName != null && mIntentFileName.length() > 0) {
+ saveAllTo(mIntentFileName);
+ }
+ mIntentRunning = false;
+ }
+ refreshSoundLevelBar();
+ break;
+ case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_STARTED:
+ log("got message java buffer test rec started!!");
+ showToast("Java Buffer Test Started");
+ resetResults();
+ refreshState();
+ refreshPlots();
+ break;
+ case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_ERROR:
+ log("got message java buffer test rec can't start!!");
+ showToast("Java Buffer Test Recording Error. Please try again");
+ refreshState();
+ stopAudioTestThreads();
+ mIntentRunning = false;
+ refreshSoundLevelBar();
+ break;
+ case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_STOP:
+ case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_COMPLETE:
+ if (mAudioThread != null) {
+ mWaveData = mAudioThread.getWaveData();
+ mGlitchesData = mAudioThread.getAllGlitches();
+ mGlitchingIntervalTooLong = mAudioThread.getGlitchingIntervalTooLong();
+ mFFTSamplingSize = mAudioThread.getFFTSamplingSize();
+ mFFTOverlapSamples = mAudioThread.getFFTOverlapSamples();
+ refreshPlots(); // only plot that last few seconds
refreshState();
- break;
- case NativeAudioThread.FUN_PLUG_NATIVE_AUDIO_THREAD_MESSAGE_REC_COMPLETE:
- case NativeAudioThread.FUN_PLUG_NATIVE_AUDIO_THREAD_MESSAGE_REC_COMPLETE_ERRORS:
- if(nativeAudioThread != null) {
- mWaveData = nativeAudioThread.getWaveData();
- mSamplingRate = nativeAudioThread.mSamplingRate;
+ mCurrentTime = (String) DateFormat.format("MMddkkmmss",
+ System.currentTimeMillis());
+ switch (msg.what) {
+ case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_STOP:
+ showToast("Java Buffer Test Stopped");
+ break;
+ case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_COMPLETE:
+ showToast("Java Buffer Test Completed");
+ break;
+ }
+
+ stopAudioTestThreads();
+ if (mIntentRunning && mIntentFileName != null && mIntentFileName.length() > 0) {
+ saveAllTo(mIntentFileName);
+ }
+ mIntentRunning = false;
+ }
+ refreshSoundLevelBar();
+ break;
+ case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_STARTED:
+ log("got message native latency test rec started!!");
+ showToast("Native Latency Test Started");
+ resetResults();
+ refreshState();
+ refreshPlots();
+ break;
+ case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_STARTED:
+ log("got message native buffer test rec started!!");
+ showToast("Native Buffer Test Started");
+ resetResults();
+ refreshState();
+ refreshPlots();
+ break;
+ case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_ERROR:
+ log("got message native latency test rec can't start!!");
+ showToast("Native Latency Test Recording Error. Please try again");
+ refreshState();
+ mIntentRunning = false;
+ refreshSoundLevelBar();
+ break;
+ case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_ERROR:
+ log("got message native buffer test rec can't start!!");
+ showToast("Native Buffer Test Recording Error. Please try again");
+ refreshState();
+ mIntentRunning = false;
+ refreshSoundLevelBar();
+ break;
+ case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_REC_STOP:
+ case NativeAudioThread.
+ LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_COMPLETE:
+ case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_COMPLETE:
+ case NativeAudioThread.
+ LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_REC_COMPLETE_ERRORS:
+ if (mNativeAudioThread != null) {
+ mGlitchesData = mNativeAudioThread.getNativeAllGlitches();
+ mGlitchingIntervalTooLong = mNativeAudioThread.getGlitchingIntervalTooLong();
+ mFFTSamplingSize = mNativeAudioThread.getNativeFFTSamplingSize();
+ mFFTOverlapSamples = mNativeAudioThread.getNativeFFTOverlapSamples();
+ mBufferTestDuration = mNativeAudioThread.getDurationInSeconds();
+ mWaveData = mNativeAudioThread.getWaveData();
+ mNativeRecorderBufferPeriodArray = mNativeAudioThread.getRecorderBufferPeriod();
+ mNativeRecorderMaxBufferPeriod = mNativeAudioThread.
+ getRecorderMaxBufferPeriod();
+ mNativePlayerBufferPeriodArray = mNativeAudioThread.getPlayerBufferPeriod();
+ mNativePlayerMaxBufferPeriod = mNativeAudioThread.getPlayerMaxBufferPeriod();
+
+ if (msg.what != NativeAudioThread.
+ LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_COMPLETE) {
mCorrelation.computeCorrelation(mWaveData, mSamplingRate);
- log("got message native rec complete!!");
- refreshPlots();
- refreshState();
- if(msg.what == NativeAudioThread.FUN_PLUG_NATIVE_AUDIO_THREAD_MESSAGE_REC_COMPLETE_ERRORS) {
- mCurrentTime = (String) DateFormat.format("MMddkkmmss", System.currentTimeMillis());
- showToast("Native Recording Completed with ERRORS");
- } else {
- mCurrentTime = (String) DateFormat.format("MMddkkmmss", System.currentTimeMillis());
- showToast("Native Recording Completed");
- }
- stopAudioThread();
}
- break;
- default:
- log("Got message:"+msg.what);
- break;
+
+ log("got message native buffer test rec complete!!");
+ refreshPlots();
+ refreshState();
+ mCurrentTime = (String) DateFormat.format("MMddkkmmss",
+ System.currentTimeMillis());
+ switch (msg.what) {
+ case NativeAudioThread.
+ LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_REC_COMPLETE_ERRORS:
+ showToast("Native Test Completed with Destroying Errors");
+ break;
+ case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_REC_STOP:
+ showToast("Native Test Stopped");
+ break;
+ default:
+ showToast("Native Test Completed");
+ break;
+ }
+
+
+ stopAudioTestThreads();
+ if (mIntentRunning && mIntentFileName != null && mIntentFileName.length() > 0) {
+ saveAllTo(mIntentFileName);
+ }
+ mIntentRunning = false;
+
+
+ }
+ refreshSoundLevelBar();
+ break;
+ default:
+ log("Got message:" + msg.what);
+ break;
}
}
};
- // Thread thread;
-
- /**
- * Called with the activity is first created.
- */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
@@ -173,7 +349,6 @@ public class LoopbackActivity extends Activity {
@Override
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
-
AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
am.setStreamVolume(AudioManager.STREAM_MUSIC,
progress, 0);
@@ -184,253 +359,663 @@ public class LoopbackActivity extends Activity {
mWavePlotView = (WavePlotView) findViewById(R.id.viewWavePlot);
mTextViewCurrentLevel = (TextView) findViewById(R.id.textViewCurrentLevel);
+ mTextViewCurrentLevel.setTextSize(15);
mTextViewEstimatedLatency = (TextView) findViewById(R.id.textViewEstimatedLatency);
refreshState();
+
+ applyIntent(getIntent());
+ }
+
+
+ @Override
+ protected void onStart() {
+ super.onStart();
+ Intent audioTestIntent = new Intent(this, AudioTestService.class);
+ startService(audioTestIntent);
+ boolean bound = bindService(audioTestIntent, mServiceConnection, Context.BIND_AUTO_CREATE);
+ if (bound) {
+ log("Successfully bound to service!");
+ }
+ else {
+ log("Failed to bind service!");
+ }
}
- private void stopAudioThread() {
+
+ @Override
+ protected void onStop() {
+ super.onStop();
+ log("Activity on stop!");
+ // Unbind from the service
+ if (mBound) {
+ unbindService(mServiceConnection);
+ mBound = false;
+ }
+ }
+
+
+ @Override
+ public void onNewIntent(Intent intent) {
+ log("On New Intent called!");
+ applyIntent(intent);
+ }
+
+
+ /**
+ * This method will be called whenever the test starts running (either by operating on the
+ * device or by adb command). In the case where the test is started through adb command,
+ * adb parameters will be read into intermediate variables.
+ */
+ private void applyIntent(Intent intent) {
+ Bundle b = intent.getExtras();
+ if (b != null && !mIntentRunning) {
+ // adb shell am start -n org.drrickorang.loopback/.LoopbackActivity
+ // --ei SF 48000 --es FileName test1 --ei RecorderBuffer 512 --ei PlayerBuffer 512
+ // --ei AudioThread 1 --ei MicSource 3 --ei AudioLevel 12
+ // --ei TestType 223 --ei BufferTestDuration 60
+
+ // Note: for native mode, player and recorder buffer sizes are the same, and can only be
+ // set through player buffer size
+ if (b.containsKey(INTENT_TEST_TYPE)) {
+ mIntentTestType = b.getInt(INTENT_TEST_TYPE);
+ mIntentRunning = true;
+ }
+
+ if (b.containsKey(INTENT_BUFFER_TEST_DURATION)) {
+ mIntentBufferTestDuration = b.getInt(INTENT_BUFFER_TEST_DURATION);
+ mIntentRunning = true;
+ }
+
+ if (b.containsKey(INTENT_SAMPLING_FREQUENCY)) {
+ mIntentSamplingRate = b.getInt(INTENT_SAMPLING_FREQUENCY);
+ mIntentRunning = true;
+ }
+
+ if (b.containsKey(INTENT_FILENAME)) {
+ mIntentFileName = b.getString(INTENT_FILENAME);
+ mIntentRunning = true;
+ }
+
+ if (b.containsKey(INTENT_RECORDER_BUFFER)) {
+ mIntentRecorderBuffer = b.getInt(INTENT_RECORDER_BUFFER);
+ mIntentRunning = true;
+ }
+
+ if (b.containsKey(INTENT_PLAYER_BUFFER)) {
+ mIntentPlayerBuffer = b.getInt(INTENT_PLAYER_BUFFER);
+ mIntentRunning = true;
+ }
+
+ if (b.containsKey(INTENT_AUDIO_THREAD)) {
+ mIntentAudioThread = b.getInt(INTENT_AUDIO_THREAD);
+ mIntentRunning = true;
+ }
+
+ if (b.containsKey(INTENT_MIC_SOURCE)) {
+ mIntentMicSource = b.getInt(INTENT_MIC_SOURCE);
+ mIntentRunning = true;
+ }
+
+ if (b.containsKey(INTENT_AUDIO_LEVEL)) {
+ mIntentAudioLevel = b.getInt(INTENT_AUDIO_LEVEL);
+ mIntentRunning = true;
+ }
+
+ log("Intent " + INTENT_TEST_TYPE + ": " + mIntentTestType);
+ log("Intent " + INTENT_BUFFER_TEST_DURATION + ": " + mIntentBufferTestDuration);
+ log("Intent " + INTENT_SAMPLING_FREQUENCY + ": " + mIntentSamplingRate);
+ log("Intent " + INTENT_FILENAME + ": " + mIntentFileName);
+ log("Intent " + INTENT_RECORDER_BUFFER + ": " + mIntentRecorderBuffer);
+ log("Intent " + INTENT_PLAYER_BUFFER + ": " + mIntentPlayerBuffer);
+ log("Intent " + INTENT_AUDIO_THREAD + ":" + mIntentAudioThread);
+ log("Intent " + INTENT_MIC_SOURCE + ": " + mIntentMicSource);
+ log("Intent " + INTENT_AUDIO_LEVEL + ": " + mIntentAudioLevel);
+
+ if (!mIntentRunning) {
+ log("No info to actually run intent.");
+ }
+
+ runIntentTest();
+ } else {
+ log("warning: can't run this intent, system busy");
+ showToast("System Busy. Stop sending intents!");
+ }
+ }
+
+
+ /**
+ * In the case where the test is started through adb command, this method will change the
+ * settings if any parameter is specified.
+ */
+ private void runIntentTest() {
+ // mIntentRunning == true if test is started through adb command.
+ if (mIntentRunning) {
+ if (mIntentBufferTestDuration > 0) {
+ getApp().setBufferTestDuration(mIntentBufferTestDuration);
+ }
+
+ if (mIntentAudioLevel >= 0) {
+ AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
+ am.setStreamVolume(AudioManager.STREAM_MUSIC,
+ mIntentAudioLevel, 0);
+ }
+
+ if (mIntentSamplingRate != 0) {
+ getApp().setSamplingRate(mIntentSamplingRate);
+ }
+
+ if (mIntentMicSource >= 0) {
+ getApp().setMicSource(mIntentMicSource);
+ }
+
+ if (mIntentAudioThread >= 0) {
+ getApp().setAudioThreadType(mIntentAudioThread);
+ getApp().computeDefaults();
+ }
+
+ int bytesPerFrame = Constant.BYTES_PER_FRAME;
+
+ if (mIntentRecorderBuffer > 0) {
+ getApp().setRecorderBufferSizeInBytes(mIntentRecorderBuffer * bytesPerFrame);
+ }
+
+ if (mIntentPlayerBuffer > 0) {
+ getApp().setPlayerBufferSizeInBytes(mIntentPlayerBuffer * bytesPerFrame);
+ }
+
+ refreshState();
+
+ if (mIntentTestType >= 0) {
+ switch (mIntentTestType) {
+ case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+ startLatencyTest();
+ break;
+ case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+ startBufferTest();
+ break;
+ default:
+ assert(false);
+ }
+ } else {
+ // if test type is not specified in command, just run latency test
+ startLatencyTest();
+ }
+
+ }
+ }
+
+
+ /** Stop all currently running threads that are related to audio test. */
+ private void stopAudioTestThreads() {
log("stopping audio threads");
- if (audioThread != null) {
- audioThread.isRunning = false;
- // isRunning = false;
+ if (mAudioThread != null) {
try {
- audioThread.finish();
- audioThread.join();
+ mAudioThread.finish();
+ mAudioThread.join(Constant.JOIN_WAIT_TIME_MS);
} catch (InterruptedException e) {
e.printStackTrace();
}
- audioThread = null;
+ mAudioThread = null;
}
- if (nativeAudioThread != null) {
- nativeAudioThread.isRunning = false;
- // isRunning = false;
+
+ if (mNativeAudioThread != null) {
try {
- nativeAudioThread.finish();
- nativeAudioThread.join();
+ mNativeAudioThread.finish();
+ mNativeAudioThread.join(Constant.JOIN_WAIT_TIME_MS);
} catch (InterruptedException e) {
e.printStackTrace();
}
- nativeAudioThread = null;
+ mNativeAudioThread = null;
}
+
+ stopLoadThreads();
System.gc();
}
+
public void onDestroy() {
- stopAudioThread();
+ stopAudioTestThreads();
super.onDestroy();
+ stopService(new Intent(this, AudioTestService.class));
}
+
@Override
protected void onResume() {
- // TODO Auto-generated method stub
super.onResume();
- //web.loadUrl(stream);
log("on resume called");
-
- //restartAudioSystem();
}
+
@Override
- protected void onPause () {
+ protected void onPause() {
super.onPause();
- //stop audio system
- stopAudioThread();
}
- public boolean isBusy() {
+ /** Check if the app is busy (running test). */
+ public boolean isBusy() {
boolean busy = false;
- if( audioThread != null) {
- if(audioThread.isRunning)
- busy = true;
+ if (mAudioThread != null && mAudioThread.mIsRunning) {
+ busy = true;
}
- if( nativeAudioThread != null) {
- if(nativeAudioThread.isRunning)
- busy = true;
+ if (mNativeAudioThread != null && mNativeAudioThread.mIsRunning) {
+ busy = true;
}
return busy;
- }
+ }
- private void restartAudioSystem() {
+ /** Create a new audio thread according to the settings. */
+ private void restartAudioSystem() {
log("restart audio system...");
- AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
int sessionId = 0; /* FIXME runtime test for am.generateAudioSessionId() in API 21 */
- int samplingRate = getApp().getSamplingRate();
- int playbackBuffer = getApp().getPlayBufferSizeInBytes();
- int recordBuffer = getApp().getRecordBufferSizeInBytes();
+ mAudioThreadType = getApp().getAudioThreadType();
+ mSamplingRate = getApp().getSamplingRate();
+ mPlayerBufferSizeInBytes = getApp().getPlayerBufferSizeInBytes();
+ mRecorderBufferSizeInBytes = getApp().getRecorderBufferSizeInBytes();
int micSource = getApp().getMicSource();
-
- log(" current sampling rate: " + samplingRate);
- stopAudioThread();
-
- //select if java or native audio thread
- if (getApp().getAudioThreadType() == LoopbackApplication.AUDIO_THREAD_TYPE_JAVA ) {
- int micSourceMapped = getApp().mapMicSource(LoopbackApplication.AUDIO_THREAD_TYPE_JAVA ,micSource);
- audioThread = new LoopbackAudioThread();
- audioThread.setMessageHandler(mMessageHandler);
- audioThread.mSessionId = sessionId;
- audioThread.setParams(samplingRate, playbackBuffer, recordBuffer,micSourceMapped);
- audioThread.start();
- } else {
- int micSourceMapped = getApp().mapMicSource(LoopbackApplication.AUDIO_THREAD_TYPE_NATIVE ,micSource);
- nativeAudioThread = new NativeAudioThread();
- nativeAudioThread.setMessageHandler(mMessageHandler);
- nativeAudioThread.mSessionId = sessionId;
- nativeAudioThread.setParams(samplingRate, playbackBuffer, recordBuffer,micSourceMapped);
- nativeAudioThread.start();
+ int bufferTestDurationInSeconds = getApp().getBufferTestDuration();
+ int bufferTestWavePlotDurationInSeconds = getApp().getBufferTestWavePlotDuration();
+
+ log(" current sampling rate: " + mSamplingRate);
+ stopAudioTestThreads();
+
+ // select java or native audio thread
+ int micSourceMapped;
+ switch (mAudioThreadType) {
+ case Constant.AUDIO_THREAD_TYPE_JAVA:
+ micSourceMapped = getApp().mapMicSource(Constant.AUDIO_THREAD_TYPE_JAVA, micSource);
+ mAudioThread = new LoopbackAudioThread(mSamplingRate, mPlayerBufferSizeInBytes,
+ mRecorderBufferSizeInBytes, micSourceMapped, mRecorderBufferPeriod,
+ mPlayerBufferPeriod, mTestType, bufferTestDurationInSeconds,
+ bufferTestWavePlotDurationInSeconds, getApplicationContext());
+ mAudioThread.setMessageHandler(mMessageHandler);
+ mAudioThread.mSessionId = sessionId;
+ mAudioThread.start();
+ break;
+ case Constant.AUDIO_THREAD_TYPE_NATIVE:
+ micSourceMapped = getApp().mapMicSource(Constant.AUDIO_THREAD_TYPE_NATIVE, micSource);
+ // Note: mRecorderBufferSizeInBytes will not actually be used, since recorder buffer
+ // size = player buffer size in native mode
+ mNativeAudioThread = new NativeAudioThread(mSamplingRate, mPlayerBufferSizeInBytes,
+ mRecorderBufferSizeInBytes, micSourceMapped, mTestType,
+ bufferTestDurationInSeconds, bufferTestWavePlotDurationInSeconds);
+ mNativeAudioThread.setMessageHandler(mMessageHandler);
+ mNativeAudioThread.mSessionId = sessionId;
+ mNativeAudioThread.start();
+ break;
}
- mWavePlotView.setSamplingRate( samplingRate);
+ startLoadThreads();
- //first refresh
+ mWavePlotView.setSamplingRate(mSamplingRate);
refreshState();
}
- private void resetBufferPeriodRecord() {
- BufferPeriod.resetRecord();
+
+
+ /** Start all LoadThread instances. */
+ private void startLoadThreads() {
+ mLoadThreads = new LoadThread[mNumberOfLoadThreads];
+
+ for (int i = 0; i < mLoadThreads.length; i++) {
+ mLoadThreads[i] = new LoadThread();
+ mLoadThreads[i].start();
+ }
+ }
+
+
+ /** Stop all LoadThread instances. */
+ private void stopLoadThreads() {
+ log("stopping load threads");
+ if (mLoadThreads != null) {
+ for (int i = 0; i < mLoadThreads.length; i++) {
+ if (mLoadThreads[i] != null) {
+ try {
+ mLoadThreads[i].requestStop();
+ mLoadThreads[i].join(Constant.JOIN_WAIT_TIME_MS);
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ mLoadThreads[i] = null;
+ }
+ }
+ }
+ }
+
+
+ private void resetBufferPeriodRecord(BufferPeriod recorderBufferPeriod,
+ BufferPeriod playerBufferPeriod) {
+ recorderBufferPeriod.resetRecord();
+ playerBufferPeriod.resetRecord();
+ }
+
+
+ /** Start the latency test. */
+ public void onButtonLatencyTest(View view) {
+ startLatencyTest();
}
- /** Called when the user clicks the button */
- public void onButtonTest(View view) {
- int samplingRate = getApp().getSamplingRate();
- int playbackBuffer = getApp().getPlayBufferSizeInBytes()/getApp().BYTES_PER_FRAME;
- int recordBuffer = getApp().getRecordBufferSizeInBytes()/getApp().BYTES_PER_FRAME;
- int micSource = getApp().getMicSource();
- String micSourceName = getApp().getMicSourceString(micSource);
- int audioThreadType = getApp().getAudioThreadType();
- log("On button test sampling rate: " + samplingRate);
- log("On button test playbackBuffer: " + playbackBuffer);
- log("On button test recordBuffer: " + recordBuffer);
- log("On button test micSource Name: " + micSourceName);
- log("On button test thread type: " + audioThreadType); //java =0, native = 1
- if( !isBusy()) {
+ private void startLatencyTest() {
+ if (!isBusy()) {
+ mBarMasterLevel.setEnabled(false);
+ resetBufferPeriodRecord(mRecorderBufferPeriod, mPlayerBufferPeriod);
+ mTestType = Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY;
restartAudioSystem();
- resetBufferPeriodRecord();
try {
- Thread.sleep(200);
+ Thread.sleep(THREAD_SLEEP_DURATION_MS);
} catch (InterruptedException e) {
e.printStackTrace();
}
- if (getApp().getAudioThreadType() == LoopbackApplication.AUDIO_THREAD_TYPE_JAVA) {
- if (audioThread != null) {
- audioThread.runTest();
+
+ switch (mAudioThreadType) {
+ case Constant.AUDIO_THREAD_TYPE_JAVA:
+ if (mAudioThread != null) {
+ mAudioThread.runTest();
}
- } else {
- if (nativeAudioThread != null) {
- nativeAudioThread.runTest();
+ break;
+ case Constant.AUDIO_THREAD_TYPE_NATIVE:
+ if (mNativeAudioThread != null) {
+ mNativeAudioThread.runTest();
}
+ break;
}
-
} else {
- //please wait, or restart application.
-// Toast.makeText(getApplicationContext(), "Test in progress... please wait",
-// Toast.LENGTH_SHORT).show();
-
showToast("Test in progress... please wait");
}
-
}
- /** Called when the user clicks the button */
- public void onButtonSave(View view) {
- //create filename with date
- String date = mCurrentTime; // the time the plot is acquired
- String micSource = getApp().getMicSourceString(getApp().getMicSource());
- String fileName = "loopback_"+/*micSource+"_"+*/date;
+ /** Start the buffer (glitch detection) test. */
+ public void onButtonBufferTest(View view) {
+ startBufferTest();
+ }
- if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
+ private void startBufferTest() {
+ if (!isBusy()) {
+ mBarMasterLevel.setEnabled(false);
+ resetBufferPeriodRecord(mRecorderBufferPeriod, mPlayerBufferPeriod);
+ mTestType = Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD;
+ restartAudioSystem(); // in this function an audio thread is created
+ try {
+ Thread.sleep(THREAD_SLEEP_DURATION_MS);
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
- Intent intent2 = new Intent(Intent.ACTION_CREATE_DOCUMENT);
- intent2.addCategory(Intent.CATEGORY_OPENABLE);
- intent2.setType("image/png");
+ switch (mAudioThreadType) {
+ case Constant.AUDIO_THREAD_TYPE_JAVA:
+ if (mAudioThread != null) {
+ mAudioThread.runBufferTest();
+ }
+ break;
+ case Constant.AUDIO_THREAD_TYPE_NATIVE:
+ if (mNativeAudioThread != null) {
+ mNativeAudioThread.runBufferTest();
+ }
+ break;
+ }
+ } else {
+ int duration = 0;
+ switch (mAudioThreadType) {
+ case Constant.AUDIO_THREAD_TYPE_JAVA:
+ duration = mAudioThread.getDurationInSeconds();
+ break;
+ case Constant.AUDIO_THREAD_TYPE_NATIVE:
+ duration = mNativeAudioThread.getDurationInSeconds();
+ break;
+ }
+ showToast("Long-run Test in progress, in total should take " +
+ Integer.toString(duration) + "s, please wait");
+ }
+ }
- intent2.putExtra(Intent.EXTRA_TITLE, fileName + ".png"); //suggested filename
- startActivityForResult(intent2, SAVE_TO_PNG_REQUEST);
- // browser.
- Intent intent = new Intent(Intent.ACTION_CREATE_DOCUMENT);
- intent.addCategory(Intent.CATEGORY_OPENABLE);
- intent.setType("audio/wav");
+ /** Stop the ongoing test. */
+ public void onButtonStopTest(View view) throws InterruptedException{
+ if (mAudioThread != null) {
+ mAudioThread.requestStopTest();
+ }
- // sometimes ".wav" will be added automatically, sometimes not
- intent.putExtra(Intent.EXTRA_TITLE, fileName + ".wav"); //suggested filename
- startActivityForResult(intent, SAVE_TO_WAVE_REQUEST);
+ if (mNativeAudioThread != null) {
+ mNativeAudioThread.requestStopTest();
+ }
+ }
+
+ /**
+ * Save five files: one .png file for a screenshot on the main activity, one .wav file for
+ * the plot displayed on the main activity, one .txt file for storing various test results, one
+ * .txt file for storing recorder buffer period data, and one .txt file for storing player
+ * buffer period data.
+ */
+ public void onButtonSave(View view) {
+ if (!isBusy()) {
+ //create filename with date
+ String date = mCurrentTime; // the time the plot is acquired
+ //String micSource = getApp().getMicSourceString(getApp().getMicSource());
+ String fileName = "loopback_" + date;
+
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
+ Intent intent = new Intent(Intent.ACTION_CREATE_DOCUMENT);
+ intent.addCategory(Intent.CATEGORY_OPENABLE);
+ intent.setType("text/plain");
+ intent.putExtra(Intent.EXTRA_TITLE, fileName + ".txt"); //suggested filename
+ startActivityForResult(intent, SAVE_TO_TXT_REQUEST);
+
+ Intent intent2 = new Intent(Intent.ACTION_CREATE_DOCUMENT);
+ intent2.addCategory(Intent.CATEGORY_OPENABLE);
+ intent2.setType("image/png");
+ intent2.putExtra(Intent.EXTRA_TITLE, fileName + ".png"); //suggested filename
+ startActivityForResult(intent2, SAVE_TO_PNG_REQUEST);
+
+ //sometimes ".wav" will be added automatically, sometimes not
+ Intent intent3 = new Intent(Intent.ACTION_CREATE_DOCUMENT);
+ intent3.addCategory(Intent.CATEGORY_OPENABLE);
+ intent3.setType("audio/wav");
+ intent3.putExtra(Intent.EXTRA_TITLE, fileName + ".wav"); //suggested filename
+ startActivityForResult(intent3, SAVE_TO_WAVE_REQUEST);
+
+ fileName = "loopback_" + date + "_recorderBufferPeriod";
+ Intent intent4 = new Intent(Intent.ACTION_CREATE_DOCUMENT);
+ intent4.addCategory(Intent.CATEGORY_OPENABLE);
+ intent4.setType("text/plain");
+ intent4.putExtra(Intent.EXTRA_TITLE, fileName + ".txt");
+ startActivityForResult(intent4, SAVE_RECORDER_BUFFER_PERIOD_TO_TXT_REQUEST);
+
+ fileName = "loopback_" + date + "_playerBufferPeriod";
+ Intent intent5 = new Intent(Intent.ACTION_CREATE_DOCUMENT);
+ intent5.addCategory(Intent.CATEGORY_OPENABLE);
+ intent5.setType("text/plain");
+ intent5.putExtra(Intent.EXTRA_TITLE, fileName + ".txt");
+ startActivityForResult(intent5, SAVE_PLAYER_BUFFER_PERIOD_TO_TXT_REQUEST);
+ } else {
+ saveAllTo(fileName);
+ }
} else {
- showToast("Saving Wave to: "+fileName+".wav");
+ showToast("Test in progress... please wait");
+ }
+ }
- //save to a given uri... local file?
- Uri uri = Uri.parse("file://mnt/sdcard/"+fileName+".wav");
- // for some devices it cannot find the path
- String temp = getPath1(uri);
- if (temp != null) {
- File file = new File(temp);
- mFilePathWav = file.getAbsolutePath();
- } else {
- mFilePathWav = "";
+ /** See the documentation on onButtonSave() */
+ public void saveAllTo(String fileName) {
+ showToast("Saving files to: " + fileName + ".(wav,png,txt)");
- }
+ //save to a given uri... local file?
+ Uri uri = Uri.parse("file://mnt/sdcard/" + fileName + ".wav");
+ String temp = getPath(uri);
- saveToWavefile(uri);
- Uri uri2 = Uri.parse("file://mnt/sdcard/"+fileName+".png");
- saveScreenShot(uri2);
+ // for some devices it cannot find the path
+ if (temp != null) {
+ File file = new File(temp);
+ mWaveFilePath = file.getAbsolutePath();
+ } else {
+ mWaveFilePath = "";
}
+
+ saveToWaveFile(uri);
+ Uri uri2 = Uri.parse("file://mnt/sdcard/" + fileName + ".png");
+ saveScreenShot(uri2);
+
+ Uri uri3 = Uri.parse("file://mnt/sdcard/" + fileName + ".txt");
+ saveReport(uri3);
+
+ String fileName2 = fileName + "_recorderBufferPeriod";
+ Uri uri4 = Uri.parse("file://mnt/sdcard/" + fileName2 + ".txt");
+ int[] bufferPeriodArray = null;
+ int maxBufferPeriod = Constant.UNKNOWN;
+ switch (mAudioThreadType) {
+ case Constant.AUDIO_THREAD_TYPE_JAVA:
+ bufferPeriodArray = mRecorderBufferPeriod.getBufferPeriodArray();
+ maxBufferPeriod = mRecorderBufferPeriod.getMaxBufferPeriod();
+ break;
+ case Constant.AUDIO_THREAD_TYPE_NATIVE:
+ bufferPeriodArray = mNativeRecorderBufferPeriodArray;
+ maxBufferPeriod = mNativeRecorderMaxBufferPeriod;
+ break;
+ }
+ saveBufferPeriod(uri4, bufferPeriodArray, maxBufferPeriod);
+
+ String fileName3 = fileName + "_playerBufferPeriod";
+ Uri uri5 = Uri.parse("file://mnt/sdcard/" + fileName3 + ".txt");
+ bufferPeriodArray = null;
+ maxBufferPeriod = Constant.UNKNOWN;
+ switch (mAudioThreadType) {
+ case Constant.AUDIO_THREAD_TYPE_JAVA:
+ bufferPeriodArray = mPlayerBufferPeriod.getBufferPeriodArray();
+ maxBufferPeriod = mPlayerBufferPeriod.getMaxBufferPeriod();
+ break;
+ case Constant.AUDIO_THREAD_TYPE_NATIVE:
+ bufferPeriodArray = mNativePlayerBufferPeriodArray;
+ maxBufferPeriod = mNativePlayerMaxBufferPeriod;
+ break;
+ }
+ saveBufferPeriod(uri5, bufferPeriodArray, maxBufferPeriod);
}
+
@Override
- public void onActivityResult(int requestCode, int resultCode,
- Intent resultData) {
+ public void onActivityResult(int requestCode, int resultCode, Intent resultData) {
log("ActivityResult request: " + requestCode + " result:" + resultCode);
- if (requestCode == SAVE_TO_WAVE_REQUEST && resultCode == Activity.RESULT_OK) {
- log("got SAVE TO WAV intent back!");
- Uri uri = null;
- if (resultData != null) {
- uri = resultData.getData();
-
- String temp = getPath1(uri);
- if (temp != null) {
- File file = new File(temp);
- mFilePathWav = file.getAbsolutePath();
- } else {
- mFilePathWav = "";
+ if (resultCode == Activity.RESULT_OK) {
+ Uri uri;
+ switch (requestCode) {
+ case SAVE_TO_WAVE_REQUEST:
+ log("got SAVE TO WAV intent back!");
+ if (resultData != null) {
+ uri = resultData.getData();
+ String temp = getPath(uri);
+ if (temp != null) {
+ File file = new File(temp);
+ mWaveFilePath = file.getAbsolutePath();
+ } else {
+ mWaveFilePath = "";
+ }
+ saveToWaveFile(uri);
+ }
+ break;
+ case SAVE_TO_PNG_REQUEST:
+ log("got SAVE TO PNG intent back!");
+ if (resultData != null) {
+ uri = resultData.getData();
+ saveScreenShot(uri);
}
+ break;
+ case SAVE_TO_TXT_REQUEST:
+ if (resultData != null) {
+ uri = resultData.getData();
+ saveReport(uri);
+ }
+ break;
+ case SAVE_RECORDER_BUFFER_PERIOD_TO_TXT_REQUEST:
+ if (resultData != null) {
+ uri = resultData.getData();
+ int[] bufferPeriodArray = null;
+ int maxBufferPeriod = Constant.UNKNOWN;
+ switch (mAudioThreadType) {
+ case Constant.AUDIO_THREAD_TYPE_JAVA:
+ bufferPeriodArray = mRecorderBufferPeriod.getBufferPeriodArray();
+ maxBufferPeriod = mRecorderBufferPeriod.getMaxBufferPeriod();
+ break;
+ case Constant.AUDIO_THREAD_TYPE_NATIVE:
+ bufferPeriodArray = mNativeRecorderBufferPeriodArray;
+ maxBufferPeriod = mNativeRecorderMaxBufferPeriod;
+ break;
+ }
+ saveBufferPeriod(uri, bufferPeriodArray, maxBufferPeriod);
+ }
+ break;
+ case SAVE_PLAYER_BUFFER_PERIOD_TO_TXT_REQUEST:
+ if (resultData != null) {
+ uri = resultData.getData();
+ int[] bufferPeriodArray = null;
+ int maxBufferPeriod = Constant.UNKNOWN;
+ switch (mAudioThreadType) {
+ case Constant.AUDIO_THREAD_TYPE_JAVA:
+ bufferPeriodArray = mPlayerBufferPeriod.getBufferPeriodArray();
+ maxBufferPeriod = mPlayerBufferPeriod.getMaxBufferPeriod();
+ break;
+ case Constant.AUDIO_THREAD_TYPE_NATIVE:
+ bufferPeriodArray = mNativePlayerBufferPeriodArray;
+ maxBufferPeriod = mNativePlayerMaxBufferPeriod;
+ break;
+ }
+ saveBufferPeriod(uri, bufferPeriodArray, maxBufferPeriod);
+ }
+ break;
+ case SETTINGS_ACTIVITY_REQUEST_CODE:
+ log("return from new settings!");
- saveToWavefile(uri);
+ // here we wipe out all previous results, in order to avoid the condition where
+ // previous results do not match the new settings
+ resetResults();
+ refreshState();
+ refreshPlots();
+ break;
}
- } else if( requestCode == SAVE_TO_PNG_REQUEST && resultCode == Activity.RESULT_OK) {
+ }
+ }
- log("got SAVE TO PNG intent back!");
- Uri uri = null;
- if (resultData != null) {
- uri = resultData.getData();
- saveScreenShot(uri);
- }
- } else if (requestCode == SETTINGS_ACTIVITY_REQUEST_CODE &&
- resultCode == Activity.RESULT_OK) {
- //new settings!
- log("return from settings!");
- refreshState();
- }
+ /**
+ * Refresh the sound level bar on the main activity to reflect the current sound level
+ * of the system.
+ */
+ private void refreshSoundLevelBar() {
+ mBarMasterLevel.setEnabled(true);
+ AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
+ int currentVolume = am.getStreamVolume(AudioManager.STREAM_MUSIC);
+ mBarMasterLevel.setProgress(currentVolume);
+ }
+
+
+ /** Reset all results gathered from previous round of test (if any). */
+ private void resetResults() {
+ mCorrelation.mEstimatedLatencyMs = 0;
+ mCorrelation.mEstimatedLatencyConfidence = 0;
+ mRecorderBufferPeriod.resetRecord();
+ mPlayerBufferPeriod.resetRecord();
+ mNativeRecorderBufferPeriodArray = null;
+ mNativePlayerBufferPeriodArray = null;
+ mGlitchesData = null;
+ mWaveData = null;
}
- // method to get the file path from uri. Doesn't work for all devices
- public String getPath1(Uri uri)
- {
+
+ /** Get the file path from uri. Doesn't work for all devices. */
+ private String getPath(Uri uri) {
String[] projection = {MediaStore.Images.Media.DATA};
Cursor cursor1 = getContentResolver().query(uri, projection, null, null, null);
if (cursor1 == null) {
- // cursor1.close();
return uri.getPath();
}
@@ -442,37 +1027,32 @@ public class LoopbackActivity extends Activity {
}
-
- /** Called when the user clicks the button */
+ /** Zoom out the plot to its full size. */
public void onButtonZoomOutFull(View view) {
-
double fullZoomOut = mWavePlotView.getMaxZoomOut();
-
mWavePlotView.setZoom(fullZoomOut);
mWavePlotView.refreshGraph();
}
- public void onButtonZoomOut(View view) {
+ /** Zoom out the plot. */
+ public void onButtonZoomOut(View view) {
double zoom = mWavePlotView.getZoom();
-
- zoom = 2.0 *zoom;
+ zoom = 2.0 * zoom;
mWavePlotView.setZoom(zoom);
mWavePlotView.refreshGraph();
}
-
- /** Called when the user clicks the button */
+ /** Zoom in the plot. */
public void onButtonZoomIn(View view) {
-
double zoom = mWavePlotView.getZoom();
-
- zoom = zoom/2.0;
+ zoom = zoom / 2.0;
mWavePlotView.setZoom(zoom);
mWavePlotView.refreshGraph();
}
+
/*
public void onButtonZoomInFull(View view) {
@@ -483,8 +1063,10 @@ public class LoopbackActivity extends Activity {
}
*/
+
+ /** Go to AboutActivity. */
public void onButtonAbout(View view) {
- if(!isBusy()) {
+ if (!isBusy()) {
Intent aboutIntent = new Intent(this, AboutActivity.class);
startActivity(aboutIntent);
} else
@@ -492,154 +1074,228 @@ public class LoopbackActivity extends Activity {
}
- public void onButtonBufferPeriod(View view) {
- if(!isBusy()) {
- HistogramView.setBufferPeriodArray(BufferPeriod.getBufferPeriodArray());
- HistogramView.setMaxBufferPeriod(BufferPeriod.getMaxBufferPeriod());
+ /** Go to RecorderBufferPeriodActivity */
+ public void onButtonRecorderBufferPeriod(View view) {
+ if (!isBusy()) {
+ Intent RecorderBufferPeriodIntent = new Intent(this,
+ RecorderBufferPeriodActivity.class);
+ int recorderBufferSizeInFrames = mRecorderBufferSizeInBytes / Constant.BYTES_PER_FRAME;
+ log("recorderBufferSizeInFrames:" + recorderBufferSizeInFrames);
+
+ switch (mAudioThreadType) {
+ case Constant.AUDIO_THREAD_TYPE_JAVA:
+ RecorderBufferPeriodIntent.putExtra("recorderBufferPeriodTimeStampArray",
+ mRecorderBufferPeriod.getBufferPeriodTimeStampArray());
+ RecorderBufferPeriodIntent.putExtra("recorderBufferPeriodArray",
+ mRecorderBufferPeriod.getBufferPeriodArray());
+ RecorderBufferPeriodIntent.putExtra("recorderBufferPeriodMax",
+ mRecorderBufferPeriod.getMaxBufferPeriod());
+ break;
+ case Constant.AUDIO_THREAD_TYPE_NATIVE:
+ // TODO change code in sles.cpp to collect timeStamp in native mode as well
+ RecorderBufferPeriodIntent.putExtra("recorderBufferPeriodArray",
+ mNativeRecorderBufferPeriodArray);
+ RecorderBufferPeriodIntent.putExtra("recorderBufferPeriodMax",
+ mNativeRecorderMaxBufferPeriod);
+ break;
+ }
+
+ RecorderBufferPeriodIntent.putExtra("recorderBufferSize", recorderBufferSizeInFrames);
+ RecorderBufferPeriodIntent.putExtra("samplingRate", mSamplingRate);
+ startActivity(RecorderBufferPeriodIntent);
+ } else
+ showToast("Test in progress... please wait");
+ }
+
+
+ /** Go to PlayerBufferPeriodActivity */
+ public void onButtonPlayerBufferPeriod(View view) {
+ if (!isBusy()) {
+ Intent PlayerBufferPeriodIntent = new Intent(this, PlayerBufferPeriodActivity.class);
+ int playerBufferSizeInFrames = mPlayerBufferSizeInBytes / Constant.BYTES_PER_FRAME;
+
+ switch (mAudioThreadType) {
+ case Constant.AUDIO_THREAD_TYPE_JAVA:
+ PlayerBufferPeriodIntent.putExtra("playerBufferPeriodTimeStampArray",
+ mPlayerBufferPeriod.getBufferPeriodTimeStampArray());
+ PlayerBufferPeriodIntent.putExtra("playerBufferPeriodArray",
+ mPlayerBufferPeriod.getBufferPeriodArray());
+ PlayerBufferPeriodIntent.putExtra("playerBufferPeriodMax",
+ mPlayerBufferPeriod.getMaxBufferPeriod());
+ break;
+ case Constant.AUDIO_THREAD_TYPE_NATIVE:
+ // TODO change code in sles.cpp to collect timeStamp in native mode as well
+ PlayerBufferPeriodIntent.putExtra("playerBufferPeriodArray",
+ mNativePlayerBufferPeriodArray);
+ PlayerBufferPeriodIntent.putExtra("playerBufferPeriodMax",
+ mNativePlayerMaxBufferPeriod);
+ break;
+ }
- Intent aboutIntent = new Intent(this, BufferPeriodActivity.class);
- startActivity(aboutIntent);
+ PlayerBufferPeriodIntent.putExtra("playerBufferSize", playerBufferSizeInFrames);
+ PlayerBufferPeriodIntent.putExtra("samplingRate", mSamplingRate);
+ startActivity(PlayerBufferPeriodIntent);
} else
showToast("Test in progress... please wait");
}
- /** Called when the user clicks the button */
- public void onButtonSettings(View view) {
- if(!isBusy()) {
+ /** Go to GlitchesActivity. */
+ public void onButtonGlitches(View view) {
+ if (!isBusy()) {
+ if (mGlitchesData != null) {
+ int numberOfGlitches = estimateNumberOfGlitches(mGlitchesData);
+ Intent GlitchesIntent = new Intent(this, GlitchesActivity.class);
+ GlitchesIntent.putExtra("glitchesArray", mGlitchesData);
+ GlitchesIntent.putExtra("FFTSamplingSize", mFFTSamplingSize);
+ GlitchesIntent.putExtra("FFTOverlapSamples", mFFTOverlapSamples);
+ GlitchesIntent.putExtra("samplingRate", mSamplingRate);
+ GlitchesIntent.putExtra("glitchingIntervalTooLong", mGlitchingIntervalTooLong);
+ GlitchesIntent.putExtra("numberOfGlitches", numberOfGlitches);
+ startActivity(GlitchesIntent);
+ } else {
+ showToast("Please run the buffer test to get data");
+ }
+
+ } else
+ showToast("Test in progress... please wait");
+ }
+
+
+ /** Go to SettingsActivity. */
+ public void onButtonSettings(View view) {
+ if (!isBusy()) {
Intent mySettingsIntent = new Intent(this, SettingsActivity.class);
//send settings
startActivityForResult(mySettingsIntent, SETTINGS_ACTIVITY_REQUEST_CODE);
} else {
showToast("Test in progress... please wait");
-// Toast.makeText(getApplicationContext(), "Test in progress... please wait",
-// Toast.LENGTH_SHORT).show();
}
}
+
+ /** Redraw the plot according to mWaveData */
void refreshPlots() {
mWavePlotView.setData(mWaveData);
mWavePlotView.redraw();
}
- void refreshState() {
+ /** Refresh the text on the main activity that shows the app states and audio settings. */
+ void refreshState() {
log("refreshState!");
- Button buttonTest = (Button) findViewById(R.id.buttonTest);
-
//get current audio level
AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
-
int currentVolume = am.getStreamVolume(AudioManager.STREAM_MUSIC);
mBarMasterLevel.setProgress(currentVolume);
- mTextViewCurrentLevel.setText(String.format("Level: %d/%d", currentVolume,
+ mTextViewCurrentLevel.setText(String.format("Sound Level: %d/%d", currentVolume,
mBarMasterLevel.getMax()));
log("refreshState 2b");
- //get info
+ // get info
int samplingRate = getApp().getSamplingRate();
- int playbackBuffer = getApp().getPlayBufferSizeInBytes()/getApp().BYTES_PER_FRAME;
- int recordBuffer = getApp().getRecordBufferSizeInBytes()/getApp().BYTES_PER_FRAME;
+ int playerBuffer = getApp().getPlayerBufferSizeInBytes() / Constant.BYTES_PER_FRAME;
+ int recorderBuffer = getApp().getRecorderBufferSizeInBytes() / Constant.BYTES_PER_FRAME;
StringBuilder s = new StringBuilder(200);
s.append("SR: " + samplingRate + " Hz");
int audioThreadType = getApp().getAudioThreadType();
- switch(audioThreadType) {
- case LoopbackApplication.AUDIO_THREAD_TYPE_JAVA:
- s.append(" Play Frames: " + playbackBuffer);
- s.append(" Record Frames: " + recordBuffer);
- s.append(" Audio: JAVA");
+ switch (audioThreadType) {
+ case Constant.AUDIO_THREAD_TYPE_JAVA:
+ s.append(" Play Frames: " + playerBuffer);
+ s.append(" Record Frames: " + recorderBuffer);
+ s.append(" Audio: JAVA");
+ break;
+ case Constant.AUDIO_THREAD_TYPE_NATIVE:
+ s.append(" Frames: " + playerBuffer);
+ s.append(" Audio: NATIVE");
break;
- case LoopbackApplication.AUDIO_THREAD_TYPE_NATIVE:
- s.append(" Frames: " + playbackBuffer);
- s.append(" Audio: NATIVE");
- break;
}
- //mic source
+ // mic source
int micSource = getApp().getMicSource();
String micSourceName = getApp().getMicSourceString(micSource);
- if(micSourceName != null) {
+ if (micSourceName != null) {
s.append(String.format(" Mic: %s", micSourceName));
}
-/*
- s.append(" App");
-*/
String info = getApp().getSystemInfo();
s.append(" " + info);
+ // show buffer test duration
+ int bufferTestDuration = getApp().getBufferTestDuration();
+ s.append("\nBuffer Test Duration: " + bufferTestDuration + "s");
+
+ // show buffer test wave plot duration
+ int bufferTestWavePlotDuration = getApp().getBufferTestWavePlotDuration();
+ s.append(" Buffer Test Wave Plot Duration: last " + bufferTestWavePlotDuration + "s");
+
mTextInfo.setText(s.toString());
-/*
- if(mCorrelation.mEstimatedLatencyMs>0.0001) {
- mTextViewEstimatedLatency.setText(String.format("Latency: %.2f ms", mCorrelation.mEstimatedLatencyMs));
- } else {
- mTextViewEstimatedLatency.setText(String.format("Latency: ----"));
+ String estimatedLatency = "----";
+
+ if (mCorrelation.mEstimatedLatencyMs > 0.0001) {
+ estimatedLatency = String.format("%.2f ms", mCorrelation.mEstimatedLatencyMs);
}
-*/
+ mTextViewEstimatedLatency.setText(String.format("Latency: %s Confidence: %.2f",
+ estimatedLatency, mCorrelation.mEstimatedLatencyConfidence));
}
+
private static void log(String msg) {
- Log.v("Recorder", msg);
+ Log.v(TAG, msg);
}
- public void showToast(String msg) {
- if(toast == null) {
- toast = Toast.makeText(getApplicationContext(), msg, Toast.LENGTH_SHORT);
+ public void showToast(String msg) {
+ if (mToast == null) {
+ mToast = Toast.makeText(getApplicationContext(), msg, Toast.LENGTH_SHORT);
} else {
- toast.setText(msg);
-
+ mToast.setText(msg);
}
-
-
{
-// toast.setText(msg);
- toast.setGravity(Gravity.CENTER_VERTICAL | Gravity.CENTER_HORIZONTAL, 10, 10);
- toast.show();
+ mToast.setGravity(Gravity.CENTER_VERTICAL | Gravity.CENTER_HORIZONTAL, 10, 10);
+ mToast.show();
}
}
+
+ /** Get the application that runs this activity. Wrapper for getApplication(). */
private LoopbackApplication getApp() {
return (LoopbackApplication) this.getApplication();
}
- void saveToWavefile(Uri uri) {
- // double [] data = audioThread.getWaveData();
- if (mWaveData != null && mWaveData.length > 0 ) {
+ /** Save a .wav file of the wave plot on the main activity. */
+ void saveToWaveFile(Uri uri) {
+ if (mWaveData != null && mWaveData.length > 0) {
AudioFileOutput audioFileOutput = new AudioFileOutput(getApplicationContext(), uri,
- mSamplingRate);
+ mSamplingRate);
boolean status = audioFileOutput.writeData(mWaveData);
-
if (status) {
- showToast("Finished exporting wave File " + mFilePathWav);
-// Toast.makeText(getApplicationContext(), "Finished exporting wave File",
-// Toast.LENGTH_SHORT).show();
+ showToast("Finished exporting wave File " + mWaveFilePath);
} else {
showToast("Something failed saving wave file");
-// Toast.makeText(getApplicationContext(), "Something failed saving wave file",
-// Toast.LENGTH_SHORT).show();
}
- }
+ }
}
- void saveScreenShot(Uri uri) {
- boolean status = false;
+ /** Save a screenshot of the main activity. */
+ void saveScreenShot(Uri uri) {
ParcelFileDescriptor parcelFileDescriptor = null;
- FileOutputStream outputStream = null;
+ FileOutputStream outputStream;
try {
- parcelFileDescriptor = getApplicationContext().getContentResolver().openFileDescriptor(uri, "w");
+ parcelFileDescriptor = getApplicationContext().getContentResolver().
+ openFileDescriptor(uri, "w");
FileDescriptor fileDescriptor = parcelFileDescriptor.getFileDescriptor();
- outputStream= new FileOutputStream(fileDescriptor);
+ outputStream = new FileOutputStream(fileDescriptor);
log("Done creating output stream");
@@ -648,20 +1304,13 @@ public class LoopbackActivity extends Activity {
View v = LL.getRootView();
v.setDrawingCacheEnabled(true);
Bitmap b = v.getDrawingCache();
- //BitmapDrawable bitmapDrawable = new BitmapDrawable(b);
//save
b.compress(Bitmap.CompressFormat.PNG, 100, outputStream);
-// int sampleCount = data.length;
-// writeHeader(sampleCount);
-// writeDataBufer(data);
-// mOutputStream.close();
- status = true;
parcelFileDescriptor.close();
v.setDrawingCacheEnabled(false);
} catch (Exception e) {
- outputStream = null;
- log("Failed to open png" +e);
+ log("Failed to open png file " + e);
} finally {
try {
if (parcelFileDescriptor != null) {
@@ -672,7 +1321,279 @@ public class LoopbackActivity extends Activity {
log("Error closing ParcelFile Descriptor");
}
}
+ }
+
+
+    /**
+     * Save a .txt file of the given buffer period's data.
+     * First column is time, second column is count.
+     * Each row is "i \t bufferPeriodArray[i]"; the index is the buffer-period bucket
+     * (presumably milliseconds, matching the "(ms)" labels used in saveReport — TODO confirm).
+     * No-op when bufferPeriodArray is null; all errors are logged rather than thrown.
+     * @param uri destination document, opened for writing through the ContentResolver
+     * @param bufferPeriodArray histogram of observed buffer periods (index -> occurrence count)
+     * @param maxBufferPeriod largest buffer period observed; buckets past it are all zero
+     */
+    void saveBufferPeriod(Uri uri, int[] bufferPeriodArray, int maxBufferPeriod) {
+        ParcelFileDescriptor parcelFileDescriptor = null;
+        FileOutputStream outputStream;
+        if (bufferPeriodArray != null) {
+            try {
+                parcelFileDescriptor = getApplicationContext().getContentResolver().
+                        openFileDescriptor(uri, "w");
+
+                FileDescriptor fileDescriptor = parcelFileDescriptor.getFileDescriptor();
+                outputStream = new FileOutputStream(fileDescriptor);
+                log("Done creating output stream for saving buffer period");
+
+                // buckets beyond maxBufferPeriod hold no data; truncate to keep the file small
+                int usefulDataRange = Math.min(maxBufferPeriod + 1, bufferPeriodArray.length);
+                int[] usefulBufferData = Arrays.copyOfRange(bufferPeriodArray, 0, usefulDataRange);
+
+                String endline = "\n";
+                String tab = "\t";
+                StringBuilder sb = new StringBuilder();
+                for (int i = 0; i < usefulBufferData.length; i++) {
+                    sb.append(i + tab + usefulBufferData[i] + endline);
+                }
+
+                // NOTE(review): getBytes() uses the platform default charset, and outputStream is
+                // never closed explicitly (closing the ParcelFileDescriptor releases the fd) —
+                // both work here but would be clearer made explicit.
+                outputStream.write(sb.toString().getBytes());
+                parcelFileDescriptor.close();
+
+            } catch (Exception e) {
+                log("Failed to open text file " + e);
+            } finally {
+                // always release the descriptor, even when writing failed part-way
+                try {
+                    if (parcelFileDescriptor != null) {
+                        parcelFileDescriptor.close();
+                    }
+                } catch (Exception e) {
+                    e.printStackTrace();
+                    log("Error closing ParcelFile Descriptor");
+                }
+            }
+        }
     }
+    /**
+     * Save a .txt file of various test results.
+     * Writes one "key = value" line per metric: date/time, audio settings (sampling rate,
+     * buffer sizes in frames, thread type, mic source, volume), then test-type-specific
+     * results — latency estimate and confidence for the latency test, or buffer-period
+     * statistics and glitch estimates for the buffer test — and finally system info.
+     * All errors are logged rather than thrown.
+     * @param uri destination document, opened for writing through the ContentResolver
+     */
+    void saveReport(Uri uri) {
+        ParcelFileDescriptor parcelFileDescriptor = null;
+        FileOutputStream outputStream;
+        try {
+            parcelFileDescriptor = getApplicationContext().getContentResolver().
+                    openFileDescriptor(uri, "w");
+
+            FileDescriptor fileDescriptor = parcelFileDescriptor.getFileDescriptor();
+            outputStream = new FileOutputStream(fileDescriptor);
+
+            log("Done creating output stream");
+
+            String endline = "\n";
+            final int stringLength = 300;
+            StringBuilder sb = new StringBuilder(stringLength);
+            // common header: settings shared by both test types
+            // (buffer sizes are reported in frames, hence the division by BYTES_PER_FRAME)
+            sb.append("DateTime = " + mCurrentTime + endline);
+            sb.append(INTENT_SAMPLING_FREQUENCY + " = " + getApp().getSamplingRate() + endline);
+            sb.append(INTENT_RECORDER_BUFFER + " = " + getApp().getRecorderBufferSizeInBytes() /
+                      Constant.BYTES_PER_FRAME + endline);
+            sb.append(INTENT_PLAYER_BUFFER + " = "
+                      + getApp().getPlayerBufferSizeInBytes() / Constant.BYTES_PER_FRAME + endline);
+            sb.append(INTENT_AUDIO_THREAD + " = " + getApp().getAudioThreadType() + endline);
+            int micSource = getApp().getMicSource();
+
+
+            // human-readable counterpart of the numeric thread-type setting
+            String audioType = "unknown";
+            switch (getApp().getAudioThreadType()) {
+            case Constant.AUDIO_THREAD_TYPE_JAVA:
+                audioType = "JAVA";
+                break;
+            case Constant.AUDIO_THREAD_TYPE_NATIVE:
+                audioType = "NATIVE";
+                break;
+            }
+            sb.append(INTENT_AUDIO_THREAD + "_String = " + audioType + endline);
+
+            sb.append(INTENT_MIC_SOURCE + " = " + micSource + endline);
+            sb.append(INTENT_MIC_SOURCE + "_String = " + getApp().getMicSourceString(micSource)
+                      + endline);
+            AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
+
+            int currentVolume = am.getStreamVolume(AudioManager.STREAM_MUSIC);
+            sb.append(INTENT_AUDIO_LEVEL + " = " + currentVolume + endline);
+
+            // test-type-specific results
+            switch (mTestType) {
+            case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+                // a near-zero estimate means correlation produced no usable latency value
+                if (mCorrelation.mEstimatedLatencyMs > 0.0001) {
+                    sb.append(String.format("LatencyMs = %.2f", mCorrelation.mEstimatedLatencyMs)
+                              + endline);
+                } else {
+                    sb.append(String.format("LatencyMs = unknown") + endline);
+                }
+
+                sb.append(String.format("LatencyConfidence = %.2f",
+                          mCorrelation.mEstimatedLatencyConfidence) + endline);
+                break;
+            case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+                sb.append("Buffer Test Duration (s) = " + mBufferTestDuration + endline);
+
+                // report expected recorder buffer period
+                int expectedRecorderBufferPeriod = mRecorderBufferSizeInBytes /
+                        Constant.BYTES_PER_FRAME * Constant.MILLIS_PER_SECOND / mSamplingRate;
+                sb.append("Expected Recorder Buffer Period (ms) = " + expectedRecorderBufferPeriod +
+                          endline);
+
+                // report recorder results
+                // (data source depends on which audio path — java or native — ran the test)
+                int recorderBufferSize = mRecorderBufferSizeInBytes / Constant.BYTES_PER_FRAME;
+                int[] recorderBufferData = null;
+                int recorderBufferDataMax = 0;
+                switch (mAudioThreadType) {
+                case Constant.AUDIO_THREAD_TYPE_JAVA:
+                    recorderBufferData = mRecorderBufferPeriod.getBufferPeriodArray();
+                    recorderBufferDataMax = mRecorderBufferPeriod.getMaxBufferPeriod();
+                    break;
+                case Constant.AUDIO_THREAD_TYPE_NATIVE:
+                    recorderBufferData = mNativeRecorderBufferPeriodArray;
+                    recorderBufferDataMax = mNativeRecorderMaxBufferPeriod;
+                    break;
+                }
+                if (recorderBufferData != null) {
+                    // this is the range of data that actually has values
+                    int usefulDataRange = Math.min(recorderBufferDataMax + 1,
+                                                   recorderBufferData.length);
+                    int[] usefulBufferData = Arrays.copyOfRange(recorderBufferData, 0,
+                                                                usefulDataRange);
+                    PerformanceMeasurement measurement = new PerformanceMeasurement(
+                            recorderBufferSize, mSamplingRate, usefulBufferData);
+                    boolean isBufferSizesMismatch = measurement.determineIsBufferSizesMatch();
+                    double benchmark = measurement.computeWeightedBenchmark();
+                    int outliers = measurement.countOutliers();
+                    sb.append("Recorder Buffer Sizes Mismatch = " + isBufferSizesMismatch +
+                              endline);
+                    sb.append("Recorder Benchmark = " + benchmark + endline);
+                    sb.append("Recorder Number of Outliers = " + outliers + endline);
+                } else {
+                    sb.append("Cannot Find Recorder Buffer Period Data!" + endline);
+                }
+
+                // report player results
+                // (mirrors the recorder section above, for the playback side)
+                int playerBufferSize = mPlayerBufferSizeInBytes / Constant.BYTES_PER_FRAME;
+                int[] playerBufferData = null;
+                int playerBufferDataMax = 0;
+                switch (mAudioThreadType) {
+                case Constant.AUDIO_THREAD_TYPE_JAVA:
+                    playerBufferData = mPlayerBufferPeriod.getBufferPeriodArray();
+                    playerBufferDataMax = mPlayerBufferPeriod.getMaxBufferPeriod();
+                    break;
+                case Constant.AUDIO_THREAD_TYPE_NATIVE:
+                    playerBufferData = mNativePlayerBufferPeriodArray;
+                    playerBufferDataMax = mNativePlayerMaxBufferPeriod;
+                    break;
+                }
+                if (playerBufferData != null) {
+                    // this is the range of data that actually has values
+                    int usefulDataRange = Math.min(playerBufferDataMax + 1,
+                                                   playerBufferData.length);
+                    int[] usefulBufferData = Arrays.copyOfRange(playerBufferData, 0,
+                                                                usefulDataRange);
+                    PerformanceMeasurement measurement = new PerformanceMeasurement(
+                            playerBufferSize, mSamplingRate, usefulBufferData);
+                    boolean isBufferSizesMismatch = measurement.determineIsBufferSizesMatch();
+                    double benchmark = measurement.computeWeightedBenchmark();
+                    int outliers = measurement.countOutliers();
+                    sb.append("Player Buffer Sizes Mismatch = " + isBufferSizesMismatch + endline);
+                    sb.append("Player Benchmark = " + benchmark + endline);
+                    sb.append("Player Number of Outliers = " + outliers + endline);
+
+                } else {
+                    sb.append("Cannot Find Player Buffer Period Data!" + endline);
+                }
+
+                // report expected player buffer period
+                int expectedPlayerBufferPeriod = mPlayerBufferSizeInBytes / Constant.BYTES_PER_FRAME
+                        * Constant.MILLIS_PER_SECOND / mSamplingRate;
+                if (audioType.equals("JAVA")) {
+                    // javaPlayerMultiple depends on the samples written per AudioTrack.write()
+                    int javaPlayerMultiple = 2;
+                    expectedPlayerBufferPeriod *= javaPlayerMultiple;
+                }
+                sb.append("Expected Player Buffer Period (ms) = " + expectedPlayerBufferPeriod +
+                          endline);
+
+                // report estimated number of glitches
+                int numberOfGlitches = estimateNumberOfGlitches(mGlitchesData);
+                sb.append("Estimated Number of Glitches = " + numberOfGlitches + endline);
+
+                // report if the total glitching interval is too long
+                sb.append("Total glitching interval too long: " +
+                          mGlitchingIntervalTooLong + endline);
+            }
+
+
+            String info = getApp().getSystemInfo();
+            sb.append("SystemInfo = " + info + endline);
+
+            // NOTE(review): getBytes() uses the platform default charset — acceptable for this
+            // ASCII report, but an explicit charset would be safer.
+            outputStream.write(sb.toString().getBytes());
+            parcelFileDescriptor.close();
+        } catch (Exception e) {
+            log("Failed to open text file " + e);
+        } finally {
+            // always release the descriptor, even when writing failed part-way
+            try {
+                if (parcelFileDescriptor != null) {
+                    parcelFileDescriptor.close();
+                }
+            } catch (Exception e) {
+                e.printStackTrace();
+                log("Error closing ParcelFile Descriptor");
+            }
+        }
+
+    }
+
+
+    /**
+     * Estimate the number of glitches. This version of estimation will count two consecutive
+     * glitching intervals as one glitch. This is because two time intervals are partly overlapped.
+     * Note: If the total glitching intervals exceed the length of glitchesData, this estimation
+     * becomes incomplete. However, whether or not the total glitching interval is too long will
+     * also be indicated, and in the case it's true, we know something went wrong.
+     * @param glitchesData entries compared to previousFFTInterval + 1, so each element appears
+     *                     to be the FFT-interval index at which a glitch was detected; unused
+     *                     tail entries (<= discard) are skipped — TODO confirm against the
+     *                     producer of this array
+     * @return estimated glitch count (each adjacent pair of glitching intervals counts as one)
+     */
+    private static int estimateNumberOfGlitches(int[] glitchesData) {
+        final int discard = 10; // don't count glitches occurring at the first few FFT intervals
+        boolean isPreviousGlitch = false; // is there a glitch in previous interval or not
+        int previousFFTInterval = -1;
+        int count = 0;
+        // if there are three consecutive glitches, the first two will be counted as one,
+        // the third will be counted as another one
+        for (int i = 0; i < glitchesData.length; i++) {
+            if (glitchesData[i] > discard) {
+                // a glitch immediately following a counted glitch is folded into it:
+                // clearing isPreviousGlitch means the NEXT consecutive one counts again
+                if (glitchesData[i] == previousFFTInterval + 1 && isPreviousGlitch) {
+                    isPreviousGlitch = false;
+                    previousFFTInterval = glitchesData[i];
+                } else {
+                    isPreviousGlitch = true;
+                    previousFFTInterval = glitchesData[i];
+                    count += 1;
+                }
+            }
+
+        }
+
+        return count;
+    }
+
+
+    /**
+     * Estimate the number of glitches. This version of estimation will count the whole consecutive
+     * intervals as one glitch. This version is not currently used.
+     * Note: If the total glitching intervals exceed the length of glitchesData, this estimation
+     * becomes incomplete. However, whether or not the total glitching interval is too long will
+     * also be indicated, and in the case it's true, we know something went wrong.
+     * @param glitchesData entries compared to previousFFTInterval + 1, so each element appears
+     *                     to be the FFT-interval index at which a glitch was detected — TODO
+     *                     confirm against the producer of this array
+     * @return estimated glitch count (any run of consecutive glitching intervals counts as one)
+     */
+    private static int estimateNumberOfGlitches2(int[] glitchesData) {
+        final int discard = 10; // don't count glitches occurring at the first few FFT intervals
+        int previousFFTInterval = -1;
+        int count = 0;
+        for (int i = 0; i < glitchesData.length; i++) {
+            if (glitchesData[i] > discard) {
+                // only the first interval of a consecutive run increments the count
+                if (glitchesData[i] != previousFFTInterval + 1) {
+                    count += 1;
+                }
+                previousFFTInterval = glitchesData[i];
+            }
+        }
+        return count;
+    }
}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackApplication.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackApplication.java
index 1c097cb..c57659f 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackApplication.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackApplication.java
@@ -28,138 +28,173 @@ import android.media.MediaRecorder;
import android.os.Build;
import android.util.Log;
+
+/**
+ * This class maintains global application state, so it also keeps and computes the default
+ * values of all the audio settings.
+ */
+
public class LoopbackApplication extends Application {
+ private static final String TAG = "LoopbackApplication";
+    // All initial setting values are defined here; some are modified in computeDefaults().
private int mSamplingRate = 48000;
- private int mPlayBufferSizeInBytes = 0;
- private int mRecordBuffSizeInBytes = 0;
- private int mAudioThreadType = 0; //0:Java, 1:Native (JNI)
+ private int mPlayerBufferSizeInBytes = 0; // for both native and java
+ private int mRecorderBuffSizeInBytes = 0; // for both native and java
+ private int mAudioThreadType = Constant.AUDIO_THREAD_TYPE_JAVA; //0:Java, 1:Native (JNI)
private int mMicSource = 3; //maps to MediaRecorder.AudioSource.VOICE_RECOGNITION;
+ private int mBufferTestDurationInSeconds = 5;
+ private int mBufferTestWavePlotDurationInSeconds = 7;
- public static final int AUDIO_THREAD_TYPE_JAVA = 0;
- public static final int AUDIO_THREAD_TYPE_NATIVE = 1;
-
- public static final int BYTES_PER_FRAME = 2;
-
- public void setDefaults () {
+ public void setDefaults() {
if (isSafeToUseSles()) {
- mAudioThreadType = AUDIO_THREAD_TYPE_NATIVE;
+ mAudioThreadType = Constant.AUDIO_THREAD_TYPE_NATIVE;
} else {
-
- mAudioThreadType = AUDIO_THREAD_TYPE_JAVA;
+ mAudioThreadType = Constant.AUDIO_THREAD_TYPE_JAVA;
}
+
computeDefaults();
}
+
int getSamplingRate() {
return mSamplingRate;
}
+
void setSamplingRate(int samplingRate) {
mSamplingRate = samplingRate;
}
+
int getAudioThreadType() {
return mAudioThreadType;
}
+
void setAudioThreadType(int audioThreadType) {
mAudioThreadType = audioThreadType;
}
- int getMicSource() { return mMicSource; }
+
+ int getMicSource() {
+ return mMicSource;
+ }
+
+
int mapMicSource(int threadType, int source) {
int mappedSource = 0;
-// <item>DEFAULT</item>
-// <item>MIC</item>
-// <item>CAMCORDER</item>
-// <item>VOICE_RECOGNITION</item>
-// <item>VOICE_COMMUNICATION</item>
-
- if(threadType == AUDIO_THREAD_TYPE_JAVA) {
+ //experiment with remote submix
+ if (threadType == Constant.AUDIO_THREAD_TYPE_JAVA) {
switch (source) {
- default:
- case 0: //DEFAULT
- mappedSource = MediaRecorder.AudioSource.DEFAULT;
- break;
- case 1: //MIC
- mappedSource = MediaRecorder.AudioSource.MIC;
- break;
- case 2: //CAMCORDER
- mappedSource = MediaRecorder.AudioSource.CAMCORDER;
- break;
- case 3: //VOICE_RECOGNITION
- mappedSource = MediaRecorder.AudioSource.VOICE_RECOGNITION;
- break;
- case 4: //VOICE_COMMUNICATION
- mappedSource = MediaRecorder.AudioSource.VOICE_COMMUNICATION;
- break;
+ default:
+ case 0: //DEFAULT
+ mappedSource = MediaRecorder.AudioSource.DEFAULT;
+ break;
+ case 1: //MIC
+ mappedSource = MediaRecorder.AudioSource.MIC;
+ break;
+ case 2: //CAMCORDER
+ mappedSource = MediaRecorder.AudioSource.CAMCORDER;
+ break;
+ case 3: //VOICE_RECOGNITION
+ mappedSource = MediaRecorder.AudioSource.VOICE_RECOGNITION;
+ break;
+ case 4: //VOICE_COMMUNICATION
+ mappedSource = MediaRecorder.AudioSource.VOICE_COMMUNICATION;
+ break;
+ case 5: //REMOTE_SUBMIX (JAVA ONLY)
+ mappedSource = MediaRecorder.AudioSource.REMOTE_SUBMIX;
+ break;
}
- } else if (threadType == AUDIO_THREAD_TYPE_NATIVE ) {
-
+ } else if (threadType == Constant.AUDIO_THREAD_TYPE_NATIVE) {
//taken form OpenSLES_AndroidConfiguration.h
switch (source) {
- default:
- case 0: //DEFAULT
- mappedSource = 0x00; //SL_ANDROID_RECORDING_PRESET_NONE
- break;
- case 1: //MIC
- mappedSource = 0x01; //SL_ANDROID_RECORDING_PRESET_GENERIC
- break;
- case 2: //CAMCORDER
- mappedSource = 0x02; //SL_ANDROID_RECORDING_PRESET_CAMCORDER
- break;
- case 3: //VOICE_RECOGNITION
- mappedSource = 0x03; //SL_ANDROID_RECORDING_PRESET_VOICE_RECOGNITION
- break;
- case 4: //VOICE_COMMUNICATION
- mappedSource = 0x04; //SL_ANDROID_RECORDING_PRESET_VOICE_COMMUNICATION
- break;
+ default:
+ case 0: //DEFAULT
+ mappedSource = 0x00; //SL_ANDROID_RECORDING_PRESET_NONE
+ break;
+ case 1: //MIC
+ mappedSource = 0x01; //SL_ANDROID_RECORDING_PRESET_GENERIC
+ break;
+ case 2: //CAMCORDER
+ mappedSource = 0x02; //SL_ANDROID_RECORDING_PRESET_CAMCORDER
+ break;
+ case 3: //VOICE_RECOGNITION
+ mappedSource = 0x03; //SL_ANDROID_RECORDING_PRESET_VOICE_RECOGNITION
+ break;
+ case 4: //VOICE_COMMUNICATION
+ mappedSource = 0x04; //SL_ANDROID_RECORDING_PRESET_VOICE_COMMUNICATION
+ break;
}
}
return mappedSource;
}
- String getMicSourceString(int source) {
+ String getMicSourceString(int source) {
String name = null;
-
String[] myArray = getResources().getStringArray(R.array.mic_source_array);
- if(myArray != null && source>=0 && source < myArray.length) {
+
+ if (myArray != null && source >= 0 && source < myArray.length) {
name = myArray[source];
}
return name;
}
+
void setMicSource(int micSource) { mMicSource = micSource; }
- int getPlayBufferSizeInBytes() {
- return mPlayBufferSizeInBytes;
+
+ int getPlayerBufferSizeInBytes() {
+ return mPlayerBufferSizeInBytes;
}
- void setPlayBufferSizeInBytes(int playBufferSizeInBytes) {
- mPlayBufferSizeInBytes = playBufferSizeInBytes;
+
+ void setPlayerBufferSizeInBytes(int playerBufferSizeInBytes) {
+ mPlayerBufferSizeInBytes = playerBufferSizeInBytes;
}
- int getRecordBufferSizeInBytes() {
- return mRecordBuffSizeInBytes;
+
+ int getRecorderBufferSizeInBytes() {
+ return mRecorderBuffSizeInBytes;
}
- void setRecordBufferSizeInBytes(int recordBufferSizeInBytes) {
- mRecordBuffSizeInBytes = recordBufferSizeInBytes;
+
+ void setRecorderBufferSizeInBytes(int recorderBufferSizeInBytes) {
+ mRecorderBuffSizeInBytes = recorderBufferSizeInBytes;
}
- public void computeDefaults() {
- int samplingRate = AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_MUSIC);
- setSamplingRate(samplingRate);
+ int getBufferTestDuration() {
+ return mBufferTestDurationInSeconds;
+ }
+
+
+ void setBufferTestDuration(int bufferTestDurationInSeconds) {
+ mBufferTestDurationInSeconds = bufferTestDurationInSeconds;
+ }
+
+
+ int getBufferTestWavePlotDuration() {
+ return mBufferTestWavePlotDurationInSeconds;
+ }
+
+
+ void setBufferTestWavePlotDuration(int bufferTestWavePlotDurationInSeconds) {
+ mBufferTestWavePlotDurationInSeconds = bufferTestWavePlotDurationInSeconds;
+ }
+ /** Compute Default audio settings. */
+ public void computeDefaults() {
+ int samplingRate = AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_MUSIC);
+ setSamplingRate(samplingRate);
- if( mAudioThreadType == AUDIO_THREAD_TYPE_NATIVE) {
+ if (mAudioThreadType == Constant.AUDIO_THREAD_TYPE_NATIVE) {
int minBufferSizeInFrames;
if (isSafeToUseGetProperty()) {
@@ -168,37 +203,34 @@ public class LoopbackApplication extends Application {
minBufferSizeInFrames = Integer.parseInt(value);
} else {
minBufferSizeInFrames = 1024;
- log("On button test micSource Name: " );
+ log("On button test micSource Name: ");
}
- int minBufferSizeInBytes = BYTES_PER_FRAME * minBufferSizeInFrames;
+ int minBufferSizeInBytes = Constant.BYTES_PER_FRAME * minBufferSizeInFrames;
- setPlayBufferSizeInBytes(minBufferSizeInBytes);
- setRecordBufferSizeInBytes(minBufferSizeInBytes);
+ setPlayerBufferSizeInBytes(minBufferSizeInBytes);
+ setRecorderBufferSizeInBytes(minBufferSizeInBytes);
} else {
+ int minPlayerBufferSizeInBytes = AudioTrack.getMinBufferSize(samplingRate,
+ AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
+ setPlayerBufferSizeInBytes(minPlayerBufferSizeInBytes);
- int minPlayBufferSizeInBytes = AudioTrack.getMinBufferSize(samplingRate,
- AudioFormat.CHANNEL_OUT_MONO,
- AudioFormat.ENCODING_PCM_16BIT);
- setPlayBufferSizeInBytes(minPlayBufferSizeInBytes);
-
- int minRecBufferSizeInBytes = AudioRecord.getMinBufferSize(samplingRate,
- AudioFormat.CHANNEL_IN_MONO,
- AudioFormat.ENCODING_PCM_16BIT);
- setRecordBufferSizeInBytes(minRecBufferSizeInBytes);
+ int minRecorderBufferSizeInBytes = AudioRecord.getMinBufferSize(samplingRate,
+ AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
+ setRecorderBufferSizeInBytes(minRecorderBufferSizeInBytes);
}
- //log("computed defaults");
-
}
- String getSystemInfo() {
+ String getSystemInfo() {
String info = null;
-
try {
- int versionCode = getApplicationContext().getPackageManager().getPackageInfo(getApplicationContext().getPackageName(), 0).versionCode;
- String versionName = getApplicationContext().getPackageManager().getPackageInfo(getApplicationContext().getPackageName(), 0).versionName;
- info = String.format("App ver. " +versionCode +"."+ versionName + " | " +Build.MODEL + " | " + Build.FINGERPRINT);
+ int versionCode = getApplicationContext().getPackageManager().getPackageInfo(
+ getApplicationContext().getPackageName(), 0).versionCode;
+ String versionName = getApplicationContext().getPackageManager().getPackageInfo(
+ getApplicationContext().getPackageName(), 0).versionName;
+ info = "App ver. " + versionCode + "." + versionName + " | " + Build.MODEL + " | " +
+ Build.FINGERPRINT;
} catch (PackageManager.NameNotFoundException e) {
e.printStackTrace();
}
@@ -206,19 +238,25 @@ public class LoopbackApplication extends Application {
return info;
}
+
+ /** Check if it's safe to use Open SLES. */
boolean isSafeToUseSles() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD;
}
+
+ /** Check if it's safe to use getProperty(). */
boolean isSafeToUseGetProperty() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1;
}
+
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
}
+
@Override
public void onCreate() {
super.onCreate();
@@ -226,17 +264,21 @@ public class LoopbackApplication extends Application {
setDefaults();
}
+
@Override
public void onLowMemory() {
super.onLowMemory();
}
+
@Override
public void onTerminate() {
super.onTerminate();
}
+
private static void log(String msg) {
- Log.v("Recorder", msg);
+ Log.v(TAG, msg);
}
+
}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackAudioThread.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackAudioThread.java
index e2eeec6..6637bb6 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackAudioThread.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackAudioThread.java
@@ -16,519 +16,362 @@
package org.drrickorang.loopback;
-//import android.content.Context;
-//import android.app.Activity;
+import android.content.Context;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
-//import android.media.MediaPlayer;
-import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.util.Log;
-
import android.os.Handler;
-import android.os.Message;
-import org.drrickorang.loopback.BufferPeriod;
+import android.os.Message;
+
/**
* A thread/audio track based audio synth.
*/
-public class LoopbackAudioThread extends Thread {
-
- public boolean isRunning = false;
- // private boolean isInitialized = false;
- double twoPi = 6.28318530718;
-
- public AudioTrack mAudioTrack;
- public int mSessionId;
-
- public double[] mvSamples; //captured samples
- int mSamplesIndex;
-
- private RecorderRunnable recorderRunnable;
- Thread mRecorderThread;
- public int mSamplingRate = 48000;
- private int mChannelConfigIn = AudioFormat.CHANNEL_IN_MONO;
- private int mAudioFormat = AudioFormat.ENCODING_PCM_16BIT;
-
- //Pipe mPipe = new Pipe(65536);
- PipeShort mPipe = new PipeShort(65536);
-
- int mMinPlayBufferSizeInBytes = 0;
- int mMinRecordBuffSizeInBytes = 0;
- private int mChannelConfigOut = AudioFormat.CHANNEL_OUT_MONO;
- // private byte[] mAudioByteArrayOut;
- private short[] mAudioShortArrayOut;
- int mMinPlayBufferSizeSamples = 0;
- int mMinRecordBufferSizeSamples = 0;
- int mMicSource = 0;
- boolean isPlaying = false;
- private Handler mMessageHandler;
-
- // private static long mStartTime = 0; // the start time of the current loop in "run()"
-
- static final int FUN_PLUG_AUDIO_THREAD_MESSAGE_REC_STARTED = 992;
- static final int FUN_PLUG_AUDIO_THREAD_MESSAGE_REC_ERROR = 993;
- static final int FUN_PLUG_AUDIO_THREAD_MESSAGE_REC_COMPLETE = 994;
- static final int FUN_PLUG_AUDIO_THREAD_MESSAGE_REC_COMPLETE_ERROR = 995;
-
- public void setParams(int samplingRate, int playBufferInBytes, int recBufferInBytes, int micSource) {
+public class LoopbackAudioThread extends Thread {
+ private static final String TAG = "LoopbackAudioThread";
+
+ private static final int THREAD_SLEEP_DURATION_MS = 1;
+
+ // for latency test
+ static final int LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_STARTED = 991;
+ static final int LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_ERROR = 992;
+ static final int LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_COMPLETE = 993;
+ static final int LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_STOP = 994;
+
+ // for buffer test
+ static final int LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_STARTED = 996;
+ static final int LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_ERROR = 997;
+ static final int LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_COMPLETE = 998;
+ static final int LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_STOP = 999;
+
+ public boolean mIsRunning = false;
+ public AudioTrack mAudioTrack;
+ public int mSessionId;
+ private Thread mRecorderThread;
+ private RecorderRunnable mRecorderRunnable;
+
+ private int mSamplingRate;
+ private int mChannelConfigIn = AudioFormat.CHANNEL_IN_MONO;
+ private int mAudioFormat = AudioFormat.ENCODING_PCM_16BIT;
+ private int mMinPlayerBufferSizeInBytes = 0;
+ private int mMinRecorderBuffSizeInBytes = 0;
+ private int mMinPlayerBufferSizeSamples = 0;
+ private int mMicSource;
+ private int mChannelConfigOut = AudioFormat.CHANNEL_OUT_MONO;
+ private boolean mIsPlaying = false;
+ private boolean mIsRequestStop = false;
+ private Handler mMessageHandler;
+ // This is the pipe that connects the player and the recorder in latency test.
+ private PipeShort mLatencyTestPipe = new PipeShort(Constant.MAX_SHORTS);
+
+ // for buffer test
+ private BufferPeriod mRecorderBufferPeriod; // used to collect recorder's buffer period
+ private BufferPeriod mPlayerBufferPeriod; // used to collect player's buffer period
+ private int mTestType; // latency test or buffer test
+ private int mBufferTestDurationInSeconds; // Duration of actual buffer test
+ private Context mContext;
+ private int mBufferTestWavePlotDurationInSeconds;
+ private boolean mIsAdjustingSoundLevel = true; // only used in buffer test
+
+
+ public LoopbackAudioThread(int samplingRate, int playerBufferInBytes, int recorderBufferInBytes,
+ int micSource, BufferPeriod recorderBufferPeriod,
+ BufferPeriod playerBufferPeriod, int testType,
+ int bufferTestDurationInSeconds,
+ int bufferTestWavePlotDurationInSeconds, Context context) {
mSamplingRate = samplingRate;
-
- mMinPlayBufferSizeInBytes = playBufferInBytes;
- mMinRecordBuffSizeInBytes = recBufferInBytes;
+ mMinPlayerBufferSizeInBytes = playerBufferInBytes;
+ mMinRecorderBuffSizeInBytes = recorderBufferInBytes;
mMicSource = micSource;
-
+ mRecorderBufferPeriod = recorderBufferPeriod;
+ mPlayerBufferPeriod = playerBufferPeriod;
+ mTestType = testType;
+ mBufferTestDurationInSeconds = bufferTestDurationInSeconds;
+ mBufferTestWavePlotDurationInSeconds = bufferTestWavePlotDurationInSeconds;
+ mContext = context;
}
+
public void run() {
setPriority(Thread.MAX_PRIORITY);
- if ( mMinPlayBufferSizeInBytes <= 0 ) {
- mMinPlayBufferSizeInBytes = AudioTrack.getMinBufferSize(mSamplingRate,mChannelConfigOut,
- mAudioFormat);
+ if (mMinPlayerBufferSizeInBytes <= 0) {
+ mMinPlayerBufferSizeInBytes = AudioTrack.getMinBufferSize(mSamplingRate,
+ mChannelConfigOut, mAudioFormat);
- log("Playback: computed min buff size = " + mMinPlayBufferSizeInBytes
- + " bytes");
+ log("Player: computed min buff size = " + mMinPlayerBufferSizeInBytes + " bytes");
} else {
- log("Plaback: using min buff size = " + mMinPlayBufferSizeInBytes
- + " bytes");
+ log("Player: using min buff size = " + mMinPlayerBufferSizeInBytes + " bytes");
}
- mMinPlayBufferSizeSamples = mMinPlayBufferSizeInBytes /2;
+ mMinPlayerBufferSizeSamples = mMinPlayerBufferSizeInBytes / Constant.BYTES_PER_FRAME;
+ short[] audioShortArrayOut = new short[mMinPlayerBufferSizeSamples];
+
+ // we may want to adjust this to different multiplication of mMinPlayerBufferSizeSamples
+ int audioTrackWriteDataSize = mMinPlayerBufferSizeSamples;
+
+ // used for buffer test only
+ final double frequency1 = Constant.PRIME_FREQUENCY_1;
+ final double frequency2 = Constant.PRIME_FREQUENCY_2; // not actually used
+ short[] bufferTestTone = new short[audioTrackWriteDataSize]; // used by AudioTrack.write()
+ ToneGeneration toneGeneration = new SineWaveTone(mSamplingRate, frequency1);
- // mAudioByteArrayOut = new byte[mMinPlayBufferSizeInBytes *4];
- mAudioShortArrayOut = new short[mMinPlayBufferSizeSamples];
+ mRecorderRunnable = new RecorderRunnable(mLatencyTestPipe, mSamplingRate, mChannelConfigIn,
+ mAudioFormat, mMinRecorderBuffSizeInBytes, MediaRecorder.AudioSource.MIC, this,
+ mRecorderBufferPeriod, mTestType, frequency1, frequency2,
+ mBufferTestWavePlotDurationInSeconds, mContext);
+ mRecorderRunnable.setBufferTestDurationInSeconds(mBufferTestDurationInSeconds);
+ mRecorderThread = new Thread(mRecorderRunnable);
- recorderRunnable = new RecorderRunnable(mPipe, mSamplingRate, mChannelConfigIn,
- mAudioFormat, mMinRecordBuffSizeInBytes, mMicSource);
- mRecorderThread = new Thread(recorderRunnable);
+ // both player and recorder run at max priority
+ mRecorderThread.setPriority(Thread.MAX_PRIORITY);
mRecorderThread.start();
mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
mSamplingRate,
mChannelConfigOut,
mAudioFormat,
- mMinPlayBufferSizeInBytes,
- AudioTrack.MODE_STREAM /* FIXME runtime test for API level 9 ,
+ mMinPlayerBufferSizeInBytes,
+ AudioTrack.MODE_STREAM /* FIXME runtime test for API level 9,
mSessionId */);
- short samples[] = new short[mMinPlayBufferSizeInBytes];
-
- int amp = 10000;
- double fr = 440.0f;
- double phase = 0.0;
-
- if(recorderRunnable != null && mAudioTrack != null) {
-
-
- isPlaying = false;
- isRunning = true;
-
- while (isRunning) {
- if (isPlaying) {
- //using PIPE
- // int bytesAvailable = mPipe.availableToRead();
- int samplesAvailable = mPipe.availableToRead();
-
-// if (bytesAvailable>0 ) {
- if (samplesAvailable > 0) {
-
-// int bytesOfInterest = bytesAvailable;
- int samplesOfInterest = samplesAvailable;
-// if ( mMinPlayBufferSizeInBytes < bytesOfInterest )
-// bytesOfInterest = mMinPlayBufferSizeInBytes;
-//
- if (mMinPlayBufferSizeSamples < samplesOfInterest)
- samplesOfInterest = mMinPlayBufferSizeSamples;
-
-// mPipe.read( mAudioByteArrayOut, 0 , bytesOfInterest);
- int samplesRead = mPipe.read(mAudioShortArrayOut, 0, samplesOfInterest);
-// int bytesAvailableAfter = mPipe.availableToRead();
-// int samplesAvailableAfter = mPipe.availableToRead();
-
- //output
-// mAudioTrack.write(mAudioByteArrayOut, 0, bytesOfInterest);
- mAudioTrack.write(mAudioShortArrayOut, 0, samplesRead);
-
- if (!recorderRunnable.isStillRoomToRecord()) {
- //stop
- endTest();
-
+ if (mRecorderRunnable != null && mAudioTrack != null) {
+ mIsPlaying = false;
+ mIsRunning = true;
+
+ while (mIsRunning && mRecorderThread.isAlive()) {
+ if (mIsPlaying) {
+ switch (mTestType) {
+ case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+ // read from the pipe and plays it out
+ int samplesAvailable = mLatencyTestPipe.availableToRead();
+ if (samplesAvailable > 0) {
+ int samplesOfInterest = samplesAvailable;
+ if (mMinPlayerBufferSizeSamples < samplesOfInterest)
+ samplesOfInterest = mMinPlayerBufferSizeSamples;
+
+ int samplesRead = mLatencyTestPipe.read(audioShortArrayOut, 0,
+ samplesOfInterest);
+ mAudioTrack.write(audioShortArrayOut, 0, samplesRead);
+ mPlayerBufferPeriod.collectBufferPeriod();
}
+ break;
+ case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+ // don't collect buffer period when we are still adjusting the sound level
+ if (mIsAdjustingSoundLevel) {
+ toneGeneration.generateTone(bufferTestTone, bufferTestTone.length);
+ mAudioTrack.write(bufferTestTone, 0, audioTrackWriteDataSize);
+ } else {
+ mPlayerBufferPeriod.collectBufferPeriod();
+ toneGeneration.generateTone(bufferTestTone, bufferTestTone.length);
+ mAudioTrack.write(bufferTestTone, 0, audioTrackWriteDataSize);
+ }
+ break;
}
-
} else {
- if (isRunning) {
+ // wait for a bit to allow AudioTrack to start playing
+ if (mIsRunning) {
try {
- sleep(1);
+ sleep(THREAD_SLEEP_DURATION_MS);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
-
}
- } //end is running
+ }
+ endTest();
+
} else {
- //something went wrong, didn't run
log("Loopback Audio Thread couldn't run!");
if (mMessageHandler != null) {
Message msg = Message.obtain();
- msg.what = FUN_PLUG_AUDIO_THREAD_MESSAGE_REC_ERROR;
+ switch (mTestType) {
+ case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+ msg.what = LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_ERROR;
+ break;
+ case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+ msg.what = LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_ERROR;
+ break;
+ }
+
mMessageHandler.sendMessage(msg);
}
}
}
+
public void setMessageHandler(Handler messageHandler) {
mMessageHandler = messageHandler;
}
- public void togglePlay() {
+ public void setIsAdjustingSoundLevel(boolean isAdjustingSoundLevel) {
+ mIsAdjustingSoundLevel = isAdjustingSoundLevel;
}
- public void runTest() {
- if(isRunning) {
+ public void runTest() {
+ if (mIsRunning) {
// start test
if (mAudioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
log("...run test, but still playing...");
endTest();
} else {
- //erase output buffer
- if (mvSamples != null)
- mvSamples = null;
-
- //resize
- int nNewSize = mSamplingRate * 2; //5 seconds!
- mvSamples = new double[nNewSize];
- mSamplesIndex = 0; //reset index
-
- //start playing
- isPlaying = true;
+ // start playing
+ mIsPlaying = true;
mAudioTrack.play();
- boolean status = recorderRunnable.startRecording(mvSamples);
+ boolean status = mRecorderRunnable.startRecording();
- log(" Started capture test");
+ log("Started capture test");
if (mMessageHandler != null) {
Message msg = Message.obtain();
- msg.what = FUN_PLUG_AUDIO_THREAD_MESSAGE_REC_STARTED;
-
- if(!status)
- msg.what = FUN_PLUG_AUDIO_THREAD_MESSAGE_REC_ERROR;
+ msg.what = LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_STARTED;
+ if (!status) {
+ msg.what = LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_ERROR;
+ }
mMessageHandler.sendMessage(msg);
}
}
}
- }
-
- public void endTest() {
- log("--Ending capture test--");
- isPlaying = false;
- mAudioTrack.pause();
- recorderRunnable.stopRecording();
- mPipe.flush();
- mAudioTrack.flush();
-
- if (mMessageHandler != null) {
- Message msg = Message.obtain();
- msg.what = FUN_PLUG_AUDIO_THREAD_MESSAGE_REC_COMPLETE;
- mMessageHandler.sendMessage(msg);
- }
-
- }
-
- public void finish() {
-
- if (isRunning) {
- isRunning = false;
- try {
- sleep(20);
- } catch (InterruptedException e) {
- e.printStackTrace();
- }
- }
-
- final AudioTrack at = mAudioTrack;
- if (at != null)
- {
- at.release();
- mAudioTrack = null;
- }
-
- Thread zeThread = mRecorderThread;
- mRecorderThread = null;
- if (zeThread != null) {
- zeThread.interrupt();
- while (zeThread.isAlive()) {
- try {
- Thread.sleep(10);
- } catch (InterruptedException e) {
- break;
- }
- }
- }
}
- private static void log(String msg) {
- Log.v("Loopback", msg);
- }
-
- double [] getWaveData () {
- return recorderRunnable.mvSamples;
- }
-
- ///////////////////////
- //////////////////////
-
- static class RecorderRunnable implements Runnable
- {
- //all recorder things here
- private final PipeShort mPipe;
- private boolean mIsRecording = false;
- private static final Object sRecordingLock = new Object();
-
- private AudioRecord mRecorder;
-
+ public void runBufferTest() {
+ if (mIsRunning) {
+ // start test
+ if (mAudioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
+ log("...run test, but still playing...");
+ endTest();
+ } else {
+ // start playing
+ mIsPlaying = true;
+ mAudioTrack.play();
+ boolean status = mRecorderRunnable.startBufferRecording();
+ log(" Started capture test");
+ if (mMessageHandler != null) {
+ Message msg = Message.obtain();
+ msg.what = LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_STARTED;
- private int mSelectedRecordSource = MediaRecorder.AudioSource.MIC;
- public int mSamplingRate = 48000;
- private int mChannelConfig = AudioFormat.CHANNEL_IN_MONO;
- public int mAudioFormat = AudioFormat.ENCODING_PCM_16BIT;
- int mMinRecordBuffSizeInBytes = 0;
- int mMinRecordBuffSizeInSamples = 0;
- // private byte[] mAudioByteArray;
- private short[] mAudioShortArray;
- private short[] mAudioTone;
- private int mAudioToneIndex;
-
- // private static long mStartTime = 0;
-
- double twoPi = 6.28318530718;
-
- public double[] mvSamples; //captured samples
- int mSamplesIndex;
-
- RecorderRunnable(PipeShort pipe, int samplingRate, int channelConfig, int audioFormat,
- int recBufferInBytes, int micSource)
- {
- mPipe = pipe;
- mSamplingRate = samplingRate;
- mChannelConfig = channelConfig;
- mAudioFormat = audioFormat;
- mMinRecordBuffSizeInBytes = recBufferInBytes;
- mSelectedRecordSource = micSource;
- }
-
- //init the recording device
- boolean initRecord() {
- log("Init Record");
-
- if (mMinRecordBuffSizeInBytes <=0 ) {
-
- mMinRecordBuffSizeInBytes = AudioRecord.getMinBufferSize(mSamplingRate,
- mChannelConfig, mAudioFormat);
- log("RecorderRunnable: computing min buff size = " + mMinRecordBuffSizeInBytes
- + " bytes");
- }
- else {
- log("RecorderRunnable: using min buff size = " + mMinRecordBuffSizeInBytes
- + " bytes");
- }
- if (mMinRecordBuffSizeInBytes <= 0) {
- return false;
- }
-
- mMinRecordBuffSizeInSamples = mMinRecordBuffSizeInBytes /2;
-
-// mAudioByteArray = new byte[mMinRecordBuffSizeInBytes / 2];
- mAudioShortArray = new short[mMinRecordBuffSizeInSamples];
+ if (!status) {
+ msg.what = LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_ERROR;
+ }
- try {
- mRecorder = new AudioRecord(mSelectedRecordSource, mSamplingRate,
- mChannelConfig, mAudioFormat, 2 * mMinRecordBuffSizeInBytes);
- } catch (IllegalArgumentException e) {
- e.printStackTrace();
- return false;
- }
- if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
- mRecorder.release();
- mRecorder = null;
- return false;
+ mMessageHandler.sendMessage(msg);
+ }
}
-
- createAudioTone(300, 1000,true);
- mAudioToneIndex = 0;
-
- return true;
}
+ }
- boolean startRecording(double vCapture[]) {
- synchronized (sRecordingLock) {
- mIsRecording = true;
- }
- mvSamples = vCapture;
- mSamplesIndex = 0;
+ /** Clean some things up before sending out a message to LoopbackActivity. */
+ public void endTest() {
+ switch (mTestType) {
+ case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+ log("--Ending latency test--");
+ break;
+ case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+ log("--Ending buffer test--");
+ break;
+ }
- boolean status = initRecord();
- if (status) {
- log("Ready to go.");
- startRecordingForReal();
+ mIsPlaying = false;
+ mAudioTrack.pause();
+ mLatencyTestPipe.flush();
+ mAudioTrack.flush();
+
+ if (mMessageHandler != null) {
+ Message msg = Message.obtain();
+ if (mIsRequestStop) {
+ switch (mTestType) {
+ case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+ msg.what = LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_STOP;
+ break;
+ case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+ msg.what = LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_STOP;
+ break;
+ }
} else {
- log("Recorder initialization error.");
- synchronized (sRecordingLock) {
- mIsRecording = false;
+ switch (mTestType) {
+ case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+ msg.what = LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_COMPLETE;
+ break;
+ case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+ msg.what = LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_COMPLETE;
+ break;
}
}
- return status;
+ mMessageHandler.sendMessage(msg);
}
+ }
- void startRecordingForReal() {
- mAudioToneIndex = 0;
- mPipe.flush();
- mRecorder.startRecording();
- }
- void stopRecording() {
- log("stop recording A");
- synchronized (sRecordingLock) {
- log("stop recording B");
- mIsRecording = false;
- }
- stopRecordingForReal();
- }
+ /**
+ * This is called only when the user requests to stop the test through
+ * pressing a button in the LoopbackActivity.
+ */
+ public void requestStopTest() throws InterruptedException {
+ mIsRequestStop = true;
+ mRecorderRunnable.requestStop();
+ }
- void stopRecordingForReal() {
- log("stop recording for real");
- if (mRecorder != null) {
- mRecorder.stop();
- }
- if (mRecorder != null) {
- mRecorder.release();
- mRecorder = null;
- }
+ /** Release mAudioTrack and mRecorderThread. */
+ public void finish() throws InterruptedException {
+ mIsRunning = false;
+ final AudioTrack at = mAudioTrack;
+ if (at != null) {
+ at.release();
+ mAudioTrack = null;
}
- private void resetLatencyRecord() {
- BufferPeriod.resetRecord();
+ Thread zeThread = mRecorderThread;
+ mRecorderThread = null;
+ if (zeThread != null) {
+ zeThread.interrupt();
+ zeThread.join(Constant.JOIN_WAIT_TIME_MS);
}
+ }
- public void run() {
-
- double phase = 0;
- double maxval = Math.pow(2, 15);
-
- resetLatencyRecord();
- while (!Thread.interrupted()) {
- boolean isRecording = false;
-
- synchronized (sRecordingLock) {
- isRecording = mIsRecording;
- }
- //long mStartTime = System.nanoTime();
-
- if (isRecording && mRecorder != null) {
- BufferPeriod.collectBufferPeriod();
-
- int nSamplesRead = mRecorder.read(mAudioShortArray, 0, mMinRecordBuffSizeInSamples);
-// int nbBytesRead = mRecorder.read(mAudioByteArray, 0,
-// mMinRecordBuffSizeInBytes / 2);
-
-// if (nbBytesRead > 0) {
- if(nSamplesRead > 0) {
- { //injecting the tone
- int currentIndex = mSamplesIndex - 100; //offset
-// for (int i = 0; i < nbBytesRead/2; i++) {
- for(int i=0; i< nSamplesRead; i++) {
- // log(" <"+currentIndex +">");
- if (currentIndex >=0 && currentIndex <mAudioTone.length) {
-// short value = (short) mAudioTone[currentIndex];
-// // log("Injecting: ["+currentIndex+"]="+value);
-// //replace capture
-// mAudioByteArray[i*2+1] =(byte)( 0xFF &(value >>8));
-// mAudioByteArray[i*2] = (byte) ( 0xFF &(value));
- mAudioShortArray[i] = mAudioTone[currentIndex];
- }
- currentIndex++;
- } //for injecting tone
- }
+ private static void log(String msg) {
+ Log.v(TAG, msg);
+ }
- //mPipe.write(mAudioByteArray, 0, nbBytesRead);
- mPipe.write(mAudioShortArray, 0, nSamplesRead);
- if (isStillRoomToRecord()) { //record to vector
-
- // for (int i = 0; i < nbBytesRead/2; i++) {
- for (int i=0; i< nSamplesRead; i++) {
- double value = mAudioShortArray[i];
-// byte ba = mAudioByteArray[i*2+1];
-// byte bb = mAudioByteArray[i*2];
-// value = (ba << 8) +(bb);
- value = value/maxval;
- if ( mSamplesIndex < mvSamples.length) {
- mvSamples[mSamplesIndex++] = value;
- }
-
- }
- }
- }
- }
- }//synchronized
- stopRecording();//close this
- }
+ public double[] getWaveData() {
+ return mRecorderRunnable.getWaveData();
+ }
- public boolean isStillRoomToRecord() {
- boolean result = false;
- if (mvSamples != null) {
- if (mSamplesIndex < mvSamples.length) {
- result = true;
- }
- }
+ public int[] getAllGlitches() {
+ return mRecorderRunnable.getAllGlitches();
+ }
- return result;
- }
- private void createAudioTone(int durationSamples, int frequency, boolean taperEnds) {
- mAudioTone = new short[durationSamples];
- double phase = 0;
+ public boolean getGlitchingIntervalTooLong() {
+ return mRecorderRunnable.getGlitchingIntervalTooLong();
+ }
- for (int i = 0; i < durationSamples; i++) {
- double factor = 1.0;
- if (taperEnds) {
- if (i<durationSamples/2) {
- factor = 2.0*i/durationSamples;
- } else {
- factor = 2.0*(durationSamples-i)/durationSamples;
- }
- }
- short value = (short) (factor* Math.sin(phase)*10000);
+ public int getFFTSamplingSize() {
+ return mRecorderRunnable.getFFTSamplingSize();
+ }
- mAudioTone[i] = value;
- phase += twoPi * frequency / mSamplingRate;
- }
- while (phase > twoPi)
- phase -= twoPi;
- }
+ public int getFFTOverlapSamples() {
+ return mRecorderRunnable.getFFTOverlapSamples();
+ }
- private static void log(String msg) {
- Log.v("Recorder", msg);
- }
- //public static long getStartTime()
- // return mStartTime;
+ int getDurationInSeconds() {
+ return mBufferTestDurationInSeconds;
+ }
- } //RecorderRunnable
-}; //end thread.
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/NativeAudioThread.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/NativeAudioThread.java
index da6610f..fcec9c2 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/NativeAudioThread.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/NativeAudioThread.java
@@ -16,65 +16,82 @@
package org.drrickorang.loopback;
-//import android.content.Context;
-//import android.app.Activity;
-import android.media.AudioFormat;
-import android.media.AudioManager;
-import android.media.AudioTrack;
-//import android.media.MediaPlayer;
-import android.media.AudioRecord;
-import android.media.MediaRecorder;
-import android.util.Log;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import android.util.Log;
import android.os.Handler;
-import android.os.Message;
+import android.os.Message;
+
/**
* A thread/audio track based audio synth.
*/
-public class NativeAudioThread extends Thread {
-
- public boolean isRunning = false;
- double twoPi = 6.28318530718;
-
- public int mSessionId;
-
- public double[] mvSamples; //captured samples
- int mSamplesIndex;
-
- private final int mSecondsToRun = 2;
- public int mSamplingRate = 48000;
- private int mChannelConfigIn = AudioFormat.CHANNEL_IN_MONO;
- private int mAudioFormat = AudioFormat.ENCODING_PCM_16BIT;
- int mMinPlayBufferSizeInBytes = 0;
- int mMinRecordBuffSizeInBytes = 0;
- private int mChannelConfigOut = AudioFormat.CHANNEL_OUT_MONO;
-
- int mMicSource = 0;
-
-// private double [] samples = new double[50000];
-
- boolean isPlaying = false;
+public class NativeAudioThread extends Thread {
+ private static final String TAG = "NativeAudioThread";
+
+ // for latency test
+ static final int LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_STARTED = 891;
+ static final int LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_ERROR = 892;
+ static final int LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_COMPLETE = 893;
+
+ // for buffer test
+ static final int LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_STARTED = 896;
+ static final int LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_ERROR = 897;
+ static final int LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_COMPLETE = 898;
+
+ // used by both latency test and buffer test
+ static final int LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_REC_COMPLETE_ERRORS = 894;
+ static final int LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_REC_STOP = 900;
+
+ public boolean mIsRunning = false;
+ public int mSessionId;
+ public double[] mSamples; // store samples that will be shown on WavePlotView
+ int mSamplesIndex;
+
+ private int mTestType;
+ private int mSamplingRate;
+ private int mMinPlayerBufferSizeInBytes = 0;
+ private int mMinRecorderBuffSizeInBytes = 0; // currently not used
+ private int mMicSource;
+
+ private boolean mIsRequestStop = false;
private Handler mMessageHandler;
- boolean isDestroying = false;
- boolean hasDestroyingErrors = false;
-
- static final int FUN_PLUG_NATIVE_AUDIO_THREAD_MESSAGE_REC_STARTED = 892;
- static final int FUN_PLUG_NATIVE_AUDIO_THREAD_MESSAGE_REC_ERROR = 893;
- static final int FUN_PLUG_NATIVE_AUDIO_THREAD_MESSAGE_REC_COMPLETE = 894;
- static final int FUN_PLUG_NATIVE_AUDIO_THREAD_MESSAGE_REC_COMPLETE_ERRORS = 895;
-
- public void setParams(int samplingRate, int playBufferInBytes, int recBufferInBytes, int micSource) {
+ private boolean isDestroying = false;
+ private boolean hasDestroyingErrors = false;
+
+ // for buffer test
+ private int[] mRecorderBufferPeriod;
+ private int mRecorderMaxBufferPeriod;
+ private int[] mPlayerBufferPeriod;
+ private int mPlayerMaxBufferPeriod;
+ private int mBufferTestWavePlotDurationInSeconds;
+ private double mFrequency1 = Constant.PRIME_FREQUENCY_1;
+ private double mFrequency2 = Constant.PRIME_FREQUENCY_2; // not actually used
+ private int mBufferTestDurationInSeconds;
+ private int mFFTSamplingSize;
+ private int mFFTOverlapSamples;
+ private int[] mAllGlitches;
+ private boolean mGlitchingIntervalTooLong;
+
+ private PipeByteBuffer mPipeByteBuffer;
+ private GlitchDetectionThread mGlitchDetectionThread;
+
+
+ public NativeAudioThread(int samplingRate, int playerBufferInBytes, int recorderBufferInBytes,
+ int micSource, int testType, int bufferTestDurationInSeconds,
+ int bufferTestWavePlotDurationInSeconds) {
mSamplingRate = samplingRate;
-
- mMinPlayBufferSizeInBytes = playBufferInBytes;
- mMinRecordBuffSizeInBytes = recBufferInBytes;
-
+ mMinPlayerBufferSizeInBytes = playerBufferInBytes;
+ mMinRecorderBuffSizeInBytes = recorderBufferInBytes;
mMicSource = micSource;
-
+ mTestType = testType;
+ mBufferTestDurationInSeconds = bufferTestDurationInSeconds;
+ mBufferTestWavePlotDurationInSeconds = bufferTestWavePlotDurationInSeconds;
}
+
//JNI load
static {
try {
@@ -83,105 +100,149 @@ public class NativeAudioThread extends Thread {
log("Error loading loopback JNI library");
e.printStackTrace();
}
-
/* TODO: gracefully fail/notify if the library can't be loaded */
}
+
//jni calls
- public native long slesInit(int samplingRate, int frameCount, int micSource);
- public native int slesProcessNext(long sles_data, double[] samples, long offset);
- public native int slesDestroy(long sles_data);
+ public native long slesInit(int samplingRate, int frameCount, int micSource,
+ int testType, double frequency1, ByteBuffer byteBuffer);
+ public native int slesProcessNext(long sles_data, double[] samples, long offset);
+ public native int slesDestroy(long sles_data);
+
+ // to get buffer period data
+ public native int[] slesGetRecorderBufferPeriod(long sles_data);
+ public native int slesGetRecorderMaxBufferPeriod(long sles_data);
+ public native int[] slesGetPlayerBufferPeriod(long sles_data);
+ public native int slesGetPlayerMaxBufferPeriod(long sles_data);
- public void run() {
+ public void run() {
setPriority(Thread.MAX_PRIORITY);
- isRunning = true;
+ mIsRunning = true;
//erase output buffer
- if (mvSamples != null)
- mvSamples = null;
-
- //resize
- int nNewSize = (int)(1.1* mSamplingRate * mSecondsToRun ); //10% more just in case
- mvSamples = new double[nNewSize];
- mSamplesIndex = 0; //reset index
-
- //clear samples
- for(int i=0; i<nNewSize; i++) {
- mvSamples[i] = 0;
- }
+ if (mSamples != null)
+ mSamples = null;
//start playing
- isPlaying = true;
-
-
log(" Started capture test");
if (mMessageHandler != null) {
Message msg = Message.obtain();
- msg.what = FUN_PLUG_NATIVE_AUDIO_THREAD_MESSAGE_REC_STARTED;
+ switch (mTestType) {
+ case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+ msg.what = LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_STARTED;
+ break;
+ case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+ msg.what = LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_STARTED;
+ break;
+ }
mMessageHandler.sendMessage(msg);
}
-
-
log(String.format("about to init, sampling rate: %d, buffer:%d", mSamplingRate,
- mMinPlayBufferSizeInBytes/2 ));
- long sles_data = slesInit(mSamplingRate, mMinPlayBufferSizeInBytes/2, mMicSource);
- log(String.format("sles_data = 0x%X",sles_data));
+ mMinPlayerBufferSizeInBytes / Constant.BYTES_PER_FRAME));
- if(sles_data == 0 ) {
- //notify error!!
+ // mPipeByteBuffer is only used in buffer test
+ mPipeByteBuffer = new PipeByteBuffer(Constant.MAX_SHORTS);
+ long startTimeMs = System.currentTimeMillis();
+ long sles_data = slesInit(mSamplingRate, mMinPlayerBufferSizeInBytes /
+ Constant.BYTES_PER_FRAME, mMicSource, mTestType, mFrequency1,
+ mPipeByteBuffer.getByteBuffer());
+ log(String.format("sles_data = 0x%X", sles_data));
+ if (sles_data == 0) {
+ //notify error!!
log(" ERROR at JNI initialization");
if (mMessageHandler != null) {
Message msg = Message.obtain();
- msg.what = FUN_PLUG_NATIVE_AUDIO_THREAD_MESSAGE_REC_ERROR;
+ switch (mTestType) {
+ case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+ msg.what = LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_ERROR;
+ break;
+ case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+ msg.what = LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_ERROR;
+ break;
+ }
mMessageHandler.sendMessage(msg);
}
- } else {
-
- //wait a little bit...
+ } else {
+ // wait a little bit
try {
- sleep(10); //just to let it start properly?
+ final int setUpTime = 10;
+ sleep(setUpTime); //just to let it start properly
} catch (InterruptedException e) {
e.printStackTrace();
}
-
- mSamplesIndex = 0;
int totalSamplesRead = 0;
- long offset = 0;
- for (int ii = 0; ii < mSecondsToRun; ii++) {
- log(String.format("block %d...", ii));
- int samplesRead = slesProcessNext(sles_data, mvSamples,offset);
- totalSamplesRead += samplesRead;
-
- offset += samplesRead;
- log(" [" + ii + "] jni samples read:" + samplesRead + " currentOffset:" + offset);
-
-// log(" [" + ii + "] jni samples read:" + samplesRead + " currentSampleIndex:" + mSamplesIndex);
-// {
-// for (int jj = 0; jj < samplesRead && mSamplesIndex < mvSamples.length; jj++) {
-// mvSamples[mSamplesIndex++] = samples[jj];
-// }
-// }
- }
+ switch (mTestType) {
+ case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+ final int latencyTestDurationInSeconds = 2;
+ int nNewSize = (int) (1.1 * mSamplingRate * latencyTestDurationInSeconds);
+ mSamples = new double[nNewSize];
+ mSamplesIndex = 0; //reset index
+ Arrays.fill(mSamples, 0);
+
+ //TODO use a ByteBuffer to retrieve recorded data instead
+ long offset = 0;
+ // retrieve native recorder's recorded data
+ for (int ii = 0; ii < latencyTestDurationInSeconds; ii++) {
+ log(String.format("block %d...", ii));
+ int samplesRead = slesProcessNext(sles_data, mSamples, offset);
+ totalSamplesRead += samplesRead;
+ offset += samplesRead;
+ log(" [" + ii + "] jni samples read:" + samplesRead +
+ " currentOffset:" + offset);
+ }
+
+ log(String.format(" samplesRead: %d, sampleOffset:%d", totalSamplesRead, offset));
+ log("about to destroy...");
+ break;
+ case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+ //TODO adjust sound level to appropriate level before doing native buffer test
+ setUpGlitchDetectionThread();
+ long testDurationMs = mBufferTestDurationInSeconds * Constant.MILLIS_PER_SECOND;
+ long elapsedTimeMs = System.currentTimeMillis() - startTimeMs;
+ while (elapsedTimeMs < testDurationMs) {
+ if (mIsRequestStop) {
+ break;
+ } else {
+ try {
+ final int setUpTime = 100;
+ sleep(setUpTime); //just to let it start properly
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ elapsedTimeMs = System.currentTimeMillis() - startTimeMs;
+ }
+
+ }
+ break;
- //log(String.format(" samplesRead: %d, samplesIndex:%d", totalSamplesRead, mSamplesIndex));
- log(String.format(" samplesRead: %d, sampleOffset:%d", totalSamplesRead, offset));
- log(String.format("about to destroy..."));
-// int status = slesDestroy(sles_data);
-// log(String.format("sles delete status: %d", status));
+ }
+
+ // collect buffer period data
+ mRecorderBufferPeriod = slesGetRecorderBufferPeriod(sles_data);
+ mRecorderMaxBufferPeriod = slesGetRecorderMaxBufferPeriod(sles_data);
+ mPlayerBufferPeriod = slesGetPlayerBufferPeriod(sles_data);
+ mPlayerMaxBufferPeriod = slesGetPlayerMaxBufferPeriod(sles_data);
+
+ // get glitches data only for buffer test
+ if (mTestType == Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD) {
+ mAllGlitches = mGlitchDetectionThread.getGlitches();
+ mSamples = mGlitchDetectionThread.getWaveData();
+ mGlitchingIntervalTooLong = mGlitchDetectionThread.getGlitchingIntervalTooLong();
+ endDetecting();
+ }
runDestroy(sles_data);
- int maxTry = 20;
+ final int maxTry = 20;
int tryCount = 0;
- //isDestroying = true;
while (isDestroying) {
-
try {
sleep(40);
} catch (InterruptedException e) {
@@ -189,7 +250,6 @@ public class NativeAudioThread extends Thread {
}
tryCount++;
-
log("destroy try: " + tryCount);
if (tryCount >= maxTry) {
@@ -200,26 +260,66 @@ public class NativeAudioThread extends Thread {
}
log(String.format("after destroying. TotalSamplesRead = %d", totalSamplesRead));
- if(totalSamplesRead==0)
- {
- hasDestroyingErrors = true;
+ // for buffer test samples won't be read into here
+ if (mTestType == Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY
+ && totalSamplesRead == 0) {
+ //hasDestroyingErrors = true;
+ log("Warning: Latency test reads no sample from native recorder!");
}
endTest();
}
}
+
+ public void requestStopTest() {
+ mIsRequestStop = true;
+ }
+
+
+ /** Set up parameters needed for GlitchDetectionThread, then create and run this thread. */
+ private void setUpGlitchDetectionThread() {
+ final int targetFFTMs = 20; // we want each FFT to cover 20ms of samples
+ mFFTSamplingSize = targetFFTMs * mSamplingRate / Constant.MILLIS_PER_SECOND;
+ // round to the nearest power of 2
+ mFFTSamplingSize = (int) Math.pow(2, Math.round(Math.log(mFFTSamplingSize) / Math.log(2)));
+
+ if (mFFTSamplingSize < 2) {
+ mFFTSamplingSize = 2; // mFFTSamplingSize should be at least 2
+ }
+ mFFTOverlapSamples = mFFTSamplingSize / 2; // mFFTOverlapSamples is half of mFFTSamplingSize
+
+ mGlitchDetectionThread = new GlitchDetectionThread(mFrequency1, mFrequency2, mSamplingRate,
+ mFFTSamplingSize, mFFTOverlapSamples, mBufferTestDurationInSeconds,
+ mBufferTestWavePlotDurationInSeconds, mPipeByteBuffer);
+ mGlitchDetectionThread.start();
+ }
+
+
+ public void endDetecting() {
+ mPipeByteBuffer.flush();
+ mPipeByteBuffer = null;
+ mGlitchDetectionThread.requestStop();
+ GlitchDetectionThread tempThread = mGlitchDetectionThread;
+ mGlitchDetectionThread = null;
+ try {
+ tempThread.join(Constant.JOIN_WAIT_TIME_MS);
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+
+
public void setMessageHandler(Handler messageHandler) {
mMessageHandler = messageHandler;
}
- private void runDestroy(final long sles_data ) {
+
+ private void runDestroy(final long sles_data) {
isDestroying = true;
//start thread
-
final long local_sles_data = sles_data;
- ////
Thread thread = new Thread(new Runnable() {
public void run() {
isDestroying = true;
@@ -232,57 +332,103 @@ public class NativeAudioThread extends Thread {
});
thread.start();
-
-
-
log("end of runDestroy()");
-
-
}
- public void togglePlay() {
+
+ /** not doing real work, just to keep consistency with LoopbackAudioThread. */
+ public void runTest() {
}
- public void runTest() {
+ /** not doing real work, just to keep consistency with LoopbackAudioThread. */
+ public void runBufferTest() {
}
- public void endTest() {
- log("--Ending capture test--");
- isPlaying = false;
-
+ public void endTest() {
+ log("--Ending capture test--");
if (mMessageHandler != null) {
Message msg = Message.obtain();
- if(hasDestroyingErrors)
- msg.what = FUN_PLUG_NATIVE_AUDIO_THREAD_MESSAGE_REC_COMPLETE_ERRORS;
- else
- msg.what = FUN_PLUG_NATIVE_AUDIO_THREAD_MESSAGE_REC_COMPLETE;
+ if (hasDestroyingErrors) {
+ msg.what = LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_REC_COMPLETE_ERRORS;
+ } else if (mIsRequestStop) {
+ msg.what = LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_REC_STOP;
+ } else {
+ switch (mTestType) {
+ case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+ msg.what = LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_COMPLETE;
+ break;
+ case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+ msg.what = LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_COMPLETE;
+ break;
+ }
+ }
+
mMessageHandler.sendMessage(msg);
}
+ }
- }
public void finish() {
-
- if (isRunning) {
- isRunning = false;
- try {
- sleep(20);
- } catch (InterruptedException e) {
- e.printStackTrace();
- }
- }
+ mIsRunning = false;
}
+
private static void log(String msg) {
- Log.v("Loopback", msg);
+ Log.v(TAG, msg);
}
- double [] getWaveData () {
- return mvSamples;
+
+ double[] getWaveData() {
+ return mSamples;
+ }
+
+
+ public int[] getRecorderBufferPeriod() {
+ return mRecorderBufferPeriod;
+ }
+
+
+ public int getRecorderMaxBufferPeriod() {
+ return mRecorderMaxBufferPeriod;
+ }
+
+
+ public int[] getPlayerBufferPeriod() {
+ return mPlayerBufferPeriod;
+ }
+
+
+ public int getPlayerMaxBufferPeriod() {
+ return mPlayerMaxBufferPeriod;
+ }
+
+
+ public int[] getNativeAllGlitches() {
+ return mAllGlitches;
+ }
+
+
+ public boolean getGlitchingIntervalTooLong() {
+ return mGlitchingIntervalTooLong;
+ }
+
+
+ public int getNativeFFTSamplingSize() {
+ return mFFTSamplingSize;
+ }
+
+
+ public int getNativeFFTOverlapSamples() {
+ return mFFTOverlapSamples;
+ }
+
+
+ public int getDurationInSeconds() {
+ return mBufferTestDurationInSeconds;
}
-} //end thread.
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PerformanceMeasurement.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PerformanceMeasurement.java
new file mode 100644
index 0000000..1670b8b
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PerformanceMeasurement.java
@@ -0,0 +1,277 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.util.Log;
+
+
+/**
+ * This class is used to automatically measure the audio performance according to the
+ * recorder/player buffer period.
+ */
+
+public class PerformanceMeasurement {
+ public static final String TAG = "PerformanceMeasurement";
+
+ // this is used to enlarge the benchmark, so that it can be displayed with better accuracy on
+ // the dashboard
+ private static final int mMultiplicationFactor = 10000;
+
+ private int mExpectedBufferPeriodMs;
+ private int[] mBufferData;
+ private int mTotalOccurrence;
+
+ // used to determine buffer sizes mismatch
+ private static final double mPercentOccurrenceThreshold = 0.95;
+ // used to count the number of outliers
+ private static final int mOutliersThreshold = 3;
+
+
+ /**
+ * Note: if mBufferSize * Constant.MILLIS_PER_SECOND / mSamplingRate == Integer is satisfied,
+ * the measurement will be more accurate, but this is not necessary.
+ */
+ public PerformanceMeasurement(int bufferSize, int samplingRate, int[] bufferData) {
+ mBufferData = bufferData;
+
+ mTotalOccurrence = 0;
+ for (int i = 0; i < mBufferData.length; i++) {
+ mTotalOccurrence += mBufferData[i];
+ }
+
+ mExpectedBufferPeriodMs = bufferSize * Constant.MILLIS_PER_SECOND / samplingRate;
+ }
+
+
+ /**
+ * Measure the performance according to the collected buffer period.
+ * First, determine if there is a buffer sizes mismatch. If there is, then the performance
+ * measurement should be disregarded since it won't be accurate. If there isn't a mismatch,
+ * then a benchmark and a count on outliers can be produced as a measurement of performance.
+ * The benchmark should be as small as possible, so is the number of outliers.
+ * Note: This is a wrapper method that calls different methods and prints their results. It is
+ * also possible to call individual method to obtain specific result.
+ * Note: Should try to compare the number of outliers with the number of glitches and see if
+ * they match.
+ */
+ public void measurePerformance() {
+ // calculate standard deviation and mean of mBufferData
+ double mean = computeMean(mBufferData);
+ double standardDeviation = computeStandardDeviation(mBufferData, mean);
+ log("mean before discarding 99% data: " + mean);
+ log("standard deviation before discarding 99% data: " + standardDeviation);
+ log("stdev/mean before discarding 99% data: " + (standardDeviation / mean));
+
+ // calculate standard deviation and mean of dataAfterDiscard
+ int[] dataAfterDiscard = computeDataAfterDiscard(mBufferData);
+ double meanAfterDiscard = computeMean(dataAfterDiscard);
+ double standardDeviationAfterDiscard = computeStandardDeviation(dataAfterDiscard,
+ meanAfterDiscard);
+ log("mean after discarding 99% data: " + meanAfterDiscard);
+ log("standard deviation after discarding 99% data: " + standardDeviationAfterDiscard);
+ log("stdev/mean after discarding 99% data: " + (standardDeviationAfterDiscard /
+ meanAfterDiscard));
+ log("percent difference between two means: " + (Math.abs(meanAfterDiscard - mean) / mean));
+
+ // determine if there's a buffer sizes mismatch
+ boolean isBufferSizesMismatch = determineIsBufferSizesMatch();
+
+ // compute benchmark and count the number of outliers
+ double benchmark = computeWeightedBenchmark();
+ int outliers = countOutliers();
+
+ log("total occurrence: " + mTotalOccurrence);
+ log("buffer size mismatch: " + isBufferSizesMismatch);
+ log("benchmark: " + benchmark);
+ log("number of outliers: " + outliers);
+ log("expected buffer period: " + mExpectedBufferPeriodMs + " ms");
+ int maxPeriod = (mBufferData.length - 1);
+ log("max buffer period: " + maxPeriod + " ms");
+ }
+
+
+ /**
+ * Determine whether or not there is a buffer sizes mismatch by summing the counts around
+ * mExpectedBufferPeriod. If the percent of this count over the total count is larger than
+ * mPercentOccurrenceThreshold, then there is no mismatch. Else, there is mismatch.
+ * Note: This method may not work in every case, but should work in most cases.
+ */
+ public boolean determineIsBufferSizesMatch() {
+ int occurrenceNearExpectedBufferPeriod = 0;
+ // indicates how many beams around mExpectedBufferPeriod we want to add to the count
+ int numberOfBeams = 2;
+ int start = Math.max(0, mExpectedBufferPeriodMs - numberOfBeams);
+ int end = Math.min(mBufferData.length, mExpectedBufferPeriodMs + numberOfBeams + 1);
+ for (int i = start; i < end; i++) {
+ occurrenceNearExpectedBufferPeriod += mBufferData[i];
+ }
+ double percentOccurrence = ((double) occurrenceNearExpectedBufferPeriod) / mTotalOccurrence;
+ log("percent occurrence near center: " + percentOccurrence);
+ if (percentOccurrence > mPercentOccurrenceThreshold) {
+ return false;
+ } else {
+ return true;
+ }
+ }
+
+
+ /**
+ * Compute a benchmark using the following formula:
+ * (1/totalOccurrence) sum_i(|i - expectedBufferPeriod|^2 * occurrence_i / expectedBufferPeriod)
+ * , for i < expectedBufferPeriod * mOutliersThreshold
+ * Also, the benchmark is additionally multiplied by mMultiplicationFactor. This is not in the
+ * original formula, and it is used only because the original benchmark will be too small to
+ * be displayed accurately on the dashboard.
+ */
+ public double computeWeightedBenchmark() {
+ double weightedCount = 0;
+ double weight;
+ double benchmark;
+
+ // don't count mExpectedBufferPeriodMs + 1 towards the benchmark, because this beam may be
+ // large due to rounding issues (all results are rounded up when collecting buffer periods)
+ int threshold = Math.min(mBufferData.length, mExpectedBufferPeriodMs * mOutliersThreshold);
+ for (int i = 0; i < threshold; i++) {
+ if (mBufferData[i] != 0 && (i != mExpectedBufferPeriodMs + 1)) {
+ weight = Math.abs(i - mExpectedBufferPeriodMs);
+ weight *= weight; // squared
+ weightedCount += weight * mBufferData[i];
+ }
+ }
+ weightedCount /= mExpectedBufferPeriodMs;
+
+ benchmark = (weightedCount / mTotalOccurrence) * mMultiplicationFactor;
+ return benchmark;
+ }
+
+
+ /**
+ * All occurrence that happens after (mExpectedBufferPeriodMs * mOutliersThreshold) ms, will
+ * be considered as outliers.
+ */
+ public int countOutliers() {
+ int outliersThresholdInMs = mExpectedBufferPeriodMs * mOutliersThreshold;
+ int outliersCount = 0;
+ for (int i = outliersThresholdInMs; i < mBufferData.length; i++) {
+ outliersCount += mBufferData[i];
+ }
+ return outliersCount;
+ }
+
+
+ /**
+ * Output an array that has discarded 99 % of the data in the middle. In this array,
+ * data[i] = x means there are x occurrences of value i.
+ */
+ private int[] computeDataAfterDiscard(int[] data) {
+ // calculate the total amount of data
+ int totalCount = 0;
+ int length = data.length;
+ for (int i = 0; i < length; i++) {
+ totalCount += data[i];
+ }
+
+ // we only want to keep a certain percent of data at the bottom and top
+ final double percent = 0.005;
+ int bar = (int) (totalCount * percent);
+ if (bar == 0) { // at least keep the lowest and highest data
+ bar = 1;
+ }
+ int count = 0;
+ int[] dataAfterDiscard = new int[length];
+
+ // for bottom data
+ for (int i = 0; i < length; i++) {
+ if (count > bar) {
+ break;
+ } else if (count + data[i] > bar) {
+ dataAfterDiscard[i] += bar - count;
+ break;
+ } else {
+ dataAfterDiscard[i] += data[i];
+ count += data[i];
+ }
+ }
+
+ // for top data
+ count = 0;
+ for (int i = length - 1; i >= 0; i--) {
+ if (count > bar) {
+ break;
+ } else if (count + data[i] > bar) {
+ dataAfterDiscard[i] += bar - count;
+ break;
+ } else {
+ dataAfterDiscard[i] += data[i];
+ count += data[i];
+ }
+ }
+
+ return dataAfterDiscard;
+ }
+
+
+ /**
+ * Calculate the mean of int array "data". In this array, data[i] = x means there are
+ * x occurrences of value i.
+ */
+ private double computeMean(int[] data) {
+ int count = 0;
+ int sum = 0;
+ for (int i = 0; i < data.length; i++) {
+ count += data[i];
+ sum += data[i] * i;
+ }
+
+ double mean;
+ if (count != 0) {
+ mean = (double) sum / count;
+ } else {
+ mean = 0;
+ log("zero count!");
+ }
+
+ return mean;
+ }
+
+
+ /**
+ * Calculate the standard deviation of int array "data". In this array, data[i] = x means
+ * there are x occurrences of value i.
+ */
+ private double computeStandardDeviation(int[] data, double mean) {
+ double sumDeviation = 0;
+ int count = 0;
+ double standardDeviation;
+
+ for (int i = 0; i < data.length; i++) {
+ if (data[i] != 0) {
+ count += data[i];
+ sumDeviation += (i - mean) * (i - mean) * data[i];
+ }
+ }
+
+ standardDeviation = Math.sqrt(sumDeviation / (count - 1));
+ return standardDeviation;
+ }
+
+
+ private static void log(String msg) {
+ Log.v(TAG, msg);
+ }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Pipe.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Pipe.java
index da0492a..fa5991f 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Pipe.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Pipe.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2012 The Android Open Source Project
+ * Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,126 +16,37 @@
package org.drrickorang.loopback;
-// Non-blocking pipe supports a single writer and single reader.
-// The write side of a pipe permits overruns; flow control is the caller's responsibility.
-class Pipe {
+/**
+ * This class is a pipe that allows one writer and one reader.
+ */
- private int mFront;
- private int mRear;
- private byte mBuffer[];
- private volatile int mVolatileRear; // written by write(), read by read()
- private int mMaxBytes;
- private int mBytesOverrun;
- private int mOverruns;
- public static final int OVERRUN = -2;
+public abstract class Pipe {
+ public static final int OVERRUN = -2; // when there's an overrun, return this value
- // maxBytes will be rounded up to a power of 2, and all slots are available. Must be >= 2.
- Pipe(int maxBytes)
- {
- mMaxBytes = roundup(maxBytes);
- mBuffer = new byte[mMaxBytes];
- }
+ protected int mSamplesOverrun;
+ protected int mOverruns;
+ protected final int mMaxValues; // always in power of two
- // buffer must != null.
- // offset must be >= 0.
- // count is maximum number of bytes to copy, and must be >= 0.
- // offset + count must be <= buffer.length.
- // Returns actual number of bytes copied >= 0.
- int write(byte[] buffer, int offset, int count)
- {
- int rear = mRear & (mMaxBytes - 1);
- int written = mMaxBytes - rear;
- if (written > count) {
- written = count;
- }
- System.arraycopy(buffer, offset, mBuffer, rear, written);
- if (rear + written == mMaxBytes) {
- if ((count -= written) > rear) {
- count = rear;
- }
- if (count > 0) {
- System.arraycopy(buffer, offset + written, mBuffer, 0, count);
- written += count;
- }
- }
- mRear += written;
- mVolatileRear = mRear;
- return written;
- }
- int availableToRead()
- {
- int rear = mVolatileRear;
- int avail = rear - mFront;
- if (avail > mMaxBytes) {
- // Discard 1/16 of the most recent data in pipe to avoid another overrun immediately
- int oldFront = mFront;
- mFront = rear - mMaxBytes + (mMaxBytes >> 4);
- mBytesOverrun += mFront - oldFront;
- ++mOverruns;
- return OVERRUN;
- }
- return avail;
+ /** maxSamples must be >= 2. */
+ public Pipe(int maxSamples) {
+ mMaxValues = Utilities.roundup(maxSamples); // round up to the nearest power of 2
}
- // buffer must != null.
- // offset must be >= 0.
- // count is maximum number of bytes to copy, and must be >= 0.
- // offset + count must be <= buffer.length.
- // Returns actual number of bytes copied >= 0.
- int read(byte[] buffer, int offset, int count)
- {
- int avail = availableToRead();
- if (avail <= 0) {
- return avail;
- }
- // An overrun can occur from here on and be silently ignored,
- // but it will be caught at next read()
- if (count > avail) {
- count = avail;
- }
- int front = mFront & (mMaxBytes - 1);
- int red = mMaxBytes - front;
- if (red > count) {
- red = count;
- }
- // In particular, an overrun during the System.arraycopy will result in reading corrupt data
- System.arraycopy(mBuffer, front, buffer, offset, red);
- // We could re-read the rear pointer here to detect the corruption, but why bother?
- if (front + red == mMaxBytes) {
- if ((count -= red) > front) {
- count = front;
- }
- if (count > 0) {
- System.arraycopy(mBuffer, 0, buffer, offset + red, count);
- red += count;
- }
- }
- mFront += red;
- return red;
- }
- void flush()
- {
- mRear = mFront;
- mVolatileRear = mFront;
- }
+ /**
+ * Read at most "count" number of samples into array "buffer", starting from index "offset".
+ * If the available samples to read are fewer than count, just read as much as it can and
+ * return the amount of samples read (non-blocking). offset + count must be <= buffer.length.
+ */
+ public abstract int read(short[] buffer, int offset, int count);
+
+
+ /** Return the amount of samples available to read. */
+ public abstract int availableToRead();
- // Round up to the next highest power of 2
- private static int roundup(int v)
- {
- // Integer.numberOfLeadingZeros() returns 32 for zero input
- if (v == 0) {
- v = 1;
- }
- int lz = Integer.numberOfLeadingZeros(v);
- int rounded = 0x80000000 >>> lz;
- // 0x800000001 and higher are actually rounded _down_ to prevent overflow
- if (v > rounded && lz > 0) {
- rounded <<= 1;
- }
- return rounded;
- }
+ /** Clear the pipe. */
+ public abstract void flush();
}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PipeByteBuffer.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PipeByteBuffer.java
new file mode 100644
index 0000000..7c95aaf
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PipeByteBuffer.java
@@ -0,0 +1,174 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+import android.util.Log;
+
+
+/**
+ * Non-blocking pipe where the writer writes to the pipe by knowing the address of "mByteBuffer"
+ * and writing to this ByteBuffer directly. On the other hand, the reader reads from the pipe
+ * using read(), which converts data in the ByteBuffer into shorts.
+ * Data in the pipe are stored in the ByteBuffer array "mByteBuffer".
+ * The write side of a pipe permits overruns; flow control is the caller's responsibility.
+ */
+
+public class PipeByteBuffer extends Pipe {
+ private static final String TAG = "PipeByteBuffer";
+
+ private final ByteBuffer mByteBuffer;
+ private int mFront = 0; // reader's current position
+
+
+ /**
+ * The ByteBuffer in this class consists of two sections. The first section is the actual pipe
+ * to store data. This section must have a size in power of 2, and this is enforced by the
+ * constructor through rounding maxSamples up to the nearest power of 2. The second section
+ * is used to store metadata. Currently the only metadata is an integer that stores the rear,
+ * where rear is the writer's current position. The metadata is at the end of ByteBuffer, and is
+ * outside of the actual pipe.
+ * IMPORTANT: The code is designed (in native code) such that metadata won't be overwritten when
+ * the writer writes to the pipe. If changes to the code are required, please make sure the
+ * metadata won't be overwritten.
+ * IMPORTANT: Since a signed integer is used to store rear and mFront, their values should not
+ * exceed 2^31 - 1, or else overflow happens and the positions of rear and mFront become
+ * incorrect.
+ */
+ public PipeByteBuffer(int maxSamples) {
+ super(maxSamples);
+ int extraInt = 1; // used to store rear
+ int extraShort = extraInt * Constant.SHORTS_PER_INT;
+ int numberOfShorts = mMaxValues + extraShort;
+ mByteBuffer = ByteBuffer.allocateDirect(numberOfShorts * Constant.BYTES_PER_SHORT);
+ mByteBuffer.order(ByteOrder.LITTLE_ENDIAN);
+ }
+
+
+ /**
+ * Convert data in mByteBuffer into short, and put them into "buffer".
+ * Note: rear and mFront are kept in terms of number of shorts instead of number of bytes.
+ */
+ @Override
+ public int read(short[] buffer, int offset, int requiredSamples) {
+ // first, update the current rear
+ int rear;
+ synchronized (mByteBuffer) {
+ rear = mByteBuffer.getInt(mMaxValues * Constant.BYTES_PER_SHORT);
+ }
+ //log("initial offset: " + offset + "\n initial requiredSamples: " + requiredSamples);
+
+ // after here, rear may actually be updated further. However, we don't care. If at the point
+ // of checking there's enough data then we will read it. If not just wait until next call
+ // of read.
+ int avail = availableToRead(rear, mFront);
+ if (avail <= 0) { //return -2 for overrun
+ return avail;
+ }
+
+ // if not enough samples, just read partial samples
+ if (requiredSamples > avail) {
+ requiredSamples = avail;
+ }
+
+ // mask the upper bits to get the correct position in the pipe
+ int front = mFront & (mMaxValues - 1);
+ int read = mMaxValues - front; // total samples from currentIndex until the end of array
+ if (read > requiredSamples) {
+ read = requiredSamples;
+ }
+
+ int byteBufferFront = front * Constant.BYTES_PER_SHORT; // start reading from here
+ byteBufferToArray(buffer, offset, read, byteBufferFront);
+
+ if (front + read == mMaxValues) {
+ int samplesLeft = requiredSamples - read;
+ if (samplesLeft > 0) {
+ byteBufferFront = 0;
+ byteBufferToArray(buffer, offset + read, read + samplesLeft, byteBufferFront);
+ read += samplesLeft;
+ }
+ }
+
+ mFront += read;
+ return read;
+ }
+
+
+ /**
+ * Copy mByteBuffer's data (starting from "byteBufferFront") into short array "buffer".
+ * "start" is the starting index of "buffer" and "length" is the amount of samples copying.
+ */
+ private void byteBufferToArray(short[] buffer, int start, int length, int byteBufferFront) {
+ for (int i = start; i < (start + length); i++) {
+ buffer[i] = mByteBuffer.getShort(byteBufferFront);
+ byteBufferFront += Constant.BYTES_PER_SHORT;
+ }
+ }
+
+
+ /** Private function that actually calculates the number of samples available to read. */
+ private int availableToRead(int rear, int front) {
+ int avail = rear - front;
+ if (avail > mMaxValues) {
+ // Discard 1/16 of the most recent data in pipe to avoid another overrun immediately
+ int oldFront = mFront;
+ mFront = rear - mMaxValues + (mMaxValues >> 5);
+ mSamplesOverrun += mFront - oldFront;
+ ++mOverruns;
+ return OVERRUN;
+ }
+
+ return avail;
+ }
+
+
+ @Override
+ public int availableToRead() {
+ int rear;
+ int avail;
+ synchronized (mByteBuffer) {
+ rear = mByteBuffer.getInt(mMaxValues * Constant.BYTES_PER_SHORT);
+ }
+
+ avail = availableToRead(rear, mFront);
+ return avail;
+ }
+
+
+ public ByteBuffer getByteBuffer() {
+ return mByteBuffer;
+ }
+
+
+ @Override
+ public void flush() {
+ //set rear and front to zero
+ mFront = 0;
+ synchronized (mByteBuffer) {
+ mByteBuffer.putInt(mMaxValues * Constant.BYTES_PER_SHORT, 0);
+ }
+ }
+
+
+ private static void log(String msg) {
+ Log.v(TAG, msg);
+ }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PipeShort.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PipeShort.java
index 69e4153..829ef49 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PipeShort.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PipeShort.java
@@ -16,39 +16,45 @@
package org.drrickorang.loopback;
-// Non-blocking pipe supports a single writer and single reader.
-// The write side of a pipe permits overruns; flow control is the caller's responsibility.
-
-public class PipeShort {
-
- private int mFront;
- private int mRear;
- private short mBuffer[];
- private volatile int mVolatileRear; // written by write(), read by read()
- private int mMaxValues;
- private int mBytesOverrun;
- private int mOverruns;
- public static final int OVERRUN = -2;
-
- // maxBytes will be rounded up to a power of 2, and all slots are available. Must be >= 2.
- public PipeShort(int maxValues)
- {
- mMaxValues = roundup(maxValues);
+
+/**
+ * Non-blocking pipe where the writer writes to the pipe using write() and the reader reads
+ * from the pipe using read(). Data in the pipe are stored in the short array "mBuffer".
+ * The write side of a pipe permits overruns; flow control is the caller's responsibility.
+ */
+
+public class PipeShort extends Pipe {
+ private int mFront; // reader's current position
+ private int mRear; // writer's current position
+ private final short mBuffer[]; // store that data in the pipe
+ private volatile int mVolatileRear; // used to keep rear synchronized
+
+
+ /**
+ * IMPORTANT: Since a signed integer is used to store mRear and mFront, their values should not
+ * exceed 2^31 - 1, or else overflow happens and the positions of mRear and mFront become
+ * incorrect.
+ */
+ public PipeShort(int maxSamples) {
+ super(maxSamples);
mBuffer = new short[mMaxValues];
}
- // buffer must != null.
- // offset must be >= 0.
- // count is maximum number of bytes to copy, and must be >= 0.
- // offset + count must be <= buffer.length.
- // Returns actual number of bytes copied >= 0.
- public int write(short[] buffer, int offset, int count)
- {
+
+ /**
+ * offset must be >= 0.
+ * count is maximum number of shorts to copy, and must be >= 0.
+ * offset + count must be <= buffer.length.
+ * Return actual number of shorts copied, which will be >= 0.
+ */
+ public int write(short[] buffer, int offset, int count) {
+ // mask the upper bits to get the correct position in the pipe
int rear = mRear & (mMaxValues - 1);
int written = mMaxValues - rear;
if (written > count) {
written = count;
}
+
System.arraycopy(buffer, offset, mBuffer, rear, written);
if (rear + written == mMaxValues) {
if ((count -= written) > rear) {
@@ -59,83 +65,75 @@ public class PipeShort {
written += count;
}
}
+
mRear += written;
mVolatileRear = mRear;
return written;
}
- public int availableToRead()
- {
- int rear = mVolatileRear;
- int avail = rear - mFront;
- if (avail > mMaxValues) {
- // Discard 1/16 of the most recent data in pipe to avoid another overrun immediately
- int oldFront = mFront;
- mFront = rear - mMaxValues + (mMaxValues >> 4);
- mBytesOverrun += mFront - oldFront;
- ++mOverruns;
- return OVERRUN;
- }
- return avail;
- }
- // buffer must != null.
- // offset must be >= 0.
- // count is maximum number of bytes to copy, and must be >= 0.
- // offset + count must be <= buffer.length.
- // Returns actual number of bytes copied >= 0.
- public int read(short[] buffer, int offset, int count)
- {
+ @Override
+ public int read(short[] buffer, int offset, int count) {
int avail = availableToRead();
if (avail <= 0) {
return avail;
}
+
// An overrun can occur from here on and be silently ignored,
// but it will be caught at next read()
if (count > avail) {
count = avail;
}
+
+ // mask the upper bits to get the correct position in the pipe
int front = mFront & (mMaxValues - 1);
- int red = mMaxValues - front;
- if (red > count) {
- red = count;
+ int read = mMaxValues - front;
+
+ if (read > count) {
+ read = count;
}
+
// In particular, an overrun during the System.arraycopy will result in reading corrupt data
- System.arraycopy(mBuffer, front, buffer, offset, red);
+ System.arraycopy(mBuffer, front, buffer, offset, read);
// We could re-read the rear pointer here to detect the corruption, but why bother?
- if (front + red == mMaxValues) {
- if ((count -= red) > front) {
+ if (front + read == mMaxValues) {
+ if ((count -= read) > front) {
count = front;
}
+
if (count > 0) {
- System.arraycopy(mBuffer, 0, buffer, offset + red, count);
- red += count;
+ System.arraycopy(mBuffer, 0, buffer, offset + read, count);
+ read += count;
}
}
- mFront += red;
- return red;
- }
- public void flush()
- {
- mRear = mFront;
- mVolatileRear = mFront;
+ mFront += read;
+ return read;
}
- // Round up to the next highest power of 2
- private static int roundup(int v)
- {
- // Integer.numberOfLeadingZeros() returns 32 for zero input
- if (v == 0) {
- v = 1;
- }
- int lz = Integer.numberOfLeadingZeros(v);
- int rounded = 0x80000000 >>> lz;
- // 0x800000001 and higher are actually rounded _down_ to prevent overflow
- if (v > rounded && lz > 0) {
- rounded <<= 1;
+
+
+ @Override
+ public int availableToRead() {
+ int rear = mVolatileRear;
+ int avail = rear - mFront;
+ if (avail > mMaxValues) {
+ // Discard 1/16 of the most recent data in pipe to avoid another overrun immediately
+ int oldFront = mFront;
+ mFront = rear - mMaxValues + (mMaxValues >> 4);
+ mSamplesOverrun += mFront - oldFront;
+ ++mOverruns;
+ return OVERRUN;
}
- return rounded;
+
+ return avail;
+ }
+
+
+ @Override
+ public void flush() {
+ mRear = mFront;
+ mVolatileRear = mFront;
}
}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PlayerBufferPeriodActivity.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PlayerBufferPeriodActivity.java
new file mode 100644
index 0000000..b4d0978
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PlayerBufferPeriodActivity.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import java.util.Arrays;
+
+import android.app.Activity;
+import android.os.Bundle;
+import android.view.View;
+
+
+/**
+ * This activity will display a histogram that shows the player's buffer period.
+ */
+
+public class PlayerBufferPeriodActivity extends Activity {
+
+
+ public void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+
+ View view = getLayoutInflater().inflate(R.layout.player_buffer_period_activity, null);
+ setContentView(view);
+ HistogramView histogramView = (HistogramView) findViewById(R.id.viewWriteHistogram);
+ Bundle bundle = getIntent().getExtras();
+
+ // setup the histogram
+ int[] bufferTimeStampData = bundle.getIntArray("playerBufferPeriodTimeStampArray");
+ int[] bufferData = bundle.getIntArray("playerBufferPeriodArray");
+ int bufferDataMax = bundle.getInt("playerBufferPeriodMax");
+ histogramView.setBufferPeriodTimeStampArray(bufferTimeStampData);
+ histogramView.setBufferPeriodArray(bufferData);
+ histogramView.setMaxBufferPeriod(bufferDataMax);
+
+
+ // do performance measurement if there are buffer period data
+ if (bufferData != null) {
+ // this is the range of data that actually has values
+ int usefulDataRange = Math.min(bufferDataMax + 1, bufferData.length);
+ int[] usefulBufferData = Arrays.copyOfRange(bufferData, 0, usefulDataRange);
+ int playerBufferSize = bundle.getInt("playerBufferSize");
+ int samplingRate = bundle.getInt("samplingRate");
+ PerformanceMeasurement measurement = new PerformanceMeasurement(playerBufferSize,
+ samplingRate, usefulBufferData);
+ measurement.measurePerformance();
+ }
+
+ }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/RecorderBufferPeriodActivity.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/RecorderBufferPeriodActivity.java
new file mode 100644
index 0000000..9ecec65
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/RecorderBufferPeriodActivity.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.app.Activity;
+import android.os.Bundle;
+import android.util.Log;
+import android.view.View;
+
+import java.util.Arrays;
+
+
+/**
+ * This activity will display a histogram that shows the recorder's buffer period.
+ */
+
+public class RecorderBufferPeriodActivity extends Activity {
+ private static final String TAG = "RecorderBufferPeriodActivity";
+
+
+ public void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+
+ View view = getLayoutInflater().inflate(R.layout.recorder_buffer_period_activity, null);
+ setContentView(view);
+ HistogramView histogramView = (HistogramView) findViewById(R.id.viewReadHistogram);
+ Bundle bundle = getIntent().getExtras();
+
+ // setup the histogram
+ int[] bufferTimeStampData = bundle.getIntArray("recorderBufferPeriodTimeStampArray");
+ int[] bufferData = bundle.getIntArray("recorderBufferPeriodArray");
+ int bufferDataMax = bundle.getInt("recorderBufferPeriodMax");
+ histogramView.setBufferPeriodTimeStampArray(bufferTimeStampData);
+ histogramView.setBufferPeriodArray(bufferData);
+ histogramView.setMaxBufferPeriod(bufferDataMax);
+
+ // do performance measurement if there are buffer period data
+ if (bufferData != null) {
+ // this is the range of data that actually has values
+ int usefulDataRange = Math.min(bufferDataMax + 1, bufferData.length);
+ int[] usefulBufferData = Arrays.copyOfRange(bufferData, 0, usefulDataRange);
+ int recorderBufferSize = bundle.getInt("recorderBufferSize");
+ int samplingRate = bundle.getInt("samplingRate");
+ PerformanceMeasurement measurement = new PerformanceMeasurement(recorderBufferSize,
+ samplingRate, usefulBufferData);
+ measurement.measurePerformance();
+ }
+
+ }
+
+
+ private static void log(String msg) {
+ Log.v(TAG, msg);
+ }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/RecorderRunnable.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/RecorderRunnable.java
new file mode 100644
index 0000000..d28719d
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/RecorderRunnable.java
@@ -0,0 +1,544 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.content.Context;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioRecord;
+import android.util.Log;
+
+
+/**
+ * This thread records incoming sound samples (uses AudioRecord).
+ */
+
+public class RecorderRunnable implements Runnable {
+ private static final String TAG = "RecorderRunnable";
+
+ private AudioRecord mRecorder;
+ private boolean mIsRunning;
+ private boolean mIsRecording = false;
+ private static final Object sRecordingLock = new Object();
+
+ private final LoopbackAudioThread mAudioThread;
+ // This is the pipe that connects the player and the recorder in latency test.
+ private final PipeShort mLatencyTestPipeShort;
+ // This is the pipe that is used in buffer test to send data to GlitchDetectionThread
+ private PipeShort mBufferTestPipeShort;
+
+ private boolean mIsRequestStop = false;
+ private final int mTestType; // latency test or buffer test
+ private final int mSelectedRecordSource;
+ private final int mSamplingRate;
+ private int mChannelConfig = AudioFormat.CHANNEL_IN_MONO;
+ private int mAudioFormat = AudioFormat.ENCODING_PCM_16BIT;
+ private int mMinRecorderBuffSizeInBytes = 0;
+ private int mMinRecorderBuffSizeInSamples = 0;
+
+ private short[] mAudioShortArray; // this array stores values from mAudioTone in read()
+ private short[] mBufferTestShortArray;
+ private short[] mAudioTone;
+
+ // for glitch detection (buffer test)
+ private BufferPeriod mRecorderBufferPeriodInRecorder;
+ private final int mBufferTestWavePlotDurationInSeconds;
+ private final double mFrequency1;
+ private final double mFrequency2; // not actually used
+ private int[] mAllGlitches; // value = 1 means there's a glitch in that interval
+ private boolean mGlitchingIntervalTooLong;
+ private int mFFTSamplingSize; // the amount of samples used per FFT.
+ private int mFFTOverlapSamples; // overlap half the samples
+ private long mStartTimeMs;
+ private int mBufferTestDurationInSeconds;
+ private long mBufferTestDurationMs;
+ private final Context mContext;
+ private AudioManager mAudioManager;
+ private GlitchDetectionThread mGlitchDetectionThread;
+
+ // for adjusting sound level in buffer test
+ private double[] mSoundLevelSamples;
+ private int mSoundLevelSamplesIndex = 0;
+ private boolean mIsAdjustingSoundLevel = true; // is true if still adjusting sound level
+ private double mSoundBotLimit = 0.6; // we want to keep the sound level high
+ private double mSoundTopLimit = 0.8; // but we also don't want to be close to saturation
+ private int mAdjustSoundLevelCount = 0;
+ private int mMaxVolume; // max possible volume of the device
+
+ private double[] mSamples; // samples shown on WavePlotView
+ private int mSamplesIndex;
+
+
+ RecorderRunnable(PipeShort latencyPipe, int samplingRate, int channelConfig, int audioFormat,
+ int recorderBufferInBytes, int micSource, LoopbackAudioThread audioThread,
+ BufferPeriod recorderBufferPeriod, int testType, double frequency1,
+ double frequency2, int bufferTestWavePlotDurationInSeconds,
+ Context context) {
+ mLatencyTestPipeShort = latencyPipe;
+ mSamplingRate = samplingRate;
+ mChannelConfig = channelConfig;
+ mAudioFormat = audioFormat;
+ mMinRecorderBuffSizeInBytes = recorderBufferInBytes;
+ mSelectedRecordSource = micSource;
+ mAudioThread = audioThread;
+ mRecorderBufferPeriodInRecorder = recorderBufferPeriod;
+ mTestType = testType;
+ mFrequency1 = frequency1;
+ mFrequency2 = frequency2;
+ mBufferTestWavePlotDurationInSeconds = bufferTestWavePlotDurationInSeconds;
+ mContext = context;
+ }
+
+
+ /** Initialize the recording device for latency test. */
+ public boolean initRecord() {
+ log("Init Record");
+ if (mMinRecorderBuffSizeInBytes <= 0) {
+ mMinRecorderBuffSizeInBytes = AudioRecord.getMinBufferSize(mSamplingRate,
+ mChannelConfig, mAudioFormat);
+ log("RecorderRunnable: computing min buff size = " + mMinRecorderBuffSizeInBytes
+ + " bytes");
+ } else {
+ log("RecorderRunnable: using min buff size = " + mMinRecorderBuffSizeInBytes +
+ " bytes");
+ }
+
+ if (mMinRecorderBuffSizeInBytes <= 0) {
+ return false;
+ }
+
+ mMinRecorderBuffSizeInSamples = mMinRecorderBuffSizeInBytes / Constant.BYTES_PER_FRAME;
+ mAudioShortArray = new short[mMinRecorderBuffSizeInSamples];
+
+ try {
+ mRecorder = new AudioRecord(mSelectedRecordSource, mSamplingRate,
+ mChannelConfig, mAudioFormat, 2 * mMinRecorderBuffSizeInBytes);
+ } catch (IllegalArgumentException e) {
+ e.printStackTrace();
+ return false;
+ }
+
+ if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
+ mRecorder.release();
+ mRecorder = null;
+ return false;
+ }
+
+ createAudioTone(300, 1000, true);
+
+ return true;
+ }
+
+
+ /** Initialize the recording device for buffer test. */
+ boolean initBufferRecord() {
+ log("Init Record");
+ if (mMinRecorderBuffSizeInBytes <= 0) {
+
+ mMinRecorderBuffSizeInBytes = AudioRecord.getMinBufferSize(mSamplingRate,
+ mChannelConfig, mAudioFormat);
+ log("RecorderRunnable: computing min buff size = " + mMinRecorderBuffSizeInBytes
+ + " bytes");
+ } else {
+ log("RecorderRunnable: using min buff size = " + mMinRecorderBuffSizeInBytes +
+ " bytes");
+ }
+
+ if (mMinRecorderBuffSizeInBytes <= 0) {
+ return false;
+ }
+
+ mMinRecorderBuffSizeInSamples = mMinRecorderBuffSizeInBytes / Constant.BYTES_PER_FRAME;
+ mBufferTestShortArray = new short[mMinRecorderBuffSizeInSamples];
+
+ final int cycles = 100;
+ int soundLevelSamples = (mSamplingRate / (int) mFrequency1) * cycles;
+ mSoundLevelSamples = new double[soundLevelSamples];
+ mAudioManager = (AudioManager) mContext.getSystemService(mContext.AUDIO_SERVICE);
+ mMaxVolume = mAudioManager.getStreamMaxVolume(AudioManager.STREAM_MUSIC);
+
+ try {
+ mRecorder = new AudioRecord(mSelectedRecordSource, mSamplingRate,
+ mChannelConfig, mAudioFormat, 2 * mMinRecorderBuffSizeInBytes);
+ } catch (IllegalArgumentException e) {
+ e.printStackTrace();
+ return false;
+ }
+ if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
+ mRecorder.release();
+ mRecorder = null;
+ return false;
+ }
+
+ final int targetFFTMs = 20; // we want each FFT to cover 20ms of samples
+ mFFTSamplingSize = targetFFTMs * mSamplingRate / Constant.MILLIS_PER_SECOND;
+ // round to the nearest power of 2
+ mFFTSamplingSize = (int) Math.pow(2, Math.round(Math.log(mFFTSamplingSize) / Math.log(2)));
+
+ if (mFFTSamplingSize < 2) {
+ mFFTSamplingSize = 2; // mFFTSamplingSize should be at least 2
+ }
+ mFFTOverlapSamples = mFFTSamplingSize / 2; // mFFTOverlapSamples is half of mFFTSamplingSize
+
+ return true;
+ }
+
+
+ boolean startRecording() {
+ synchronized (sRecordingLock) {
+ mIsRecording = true;
+ }
+
+ final int samplesDurationInSecond = 2;
+ int nNewSize = mSamplingRate * samplesDurationInSecond; // 2 seconds!
+ mSamples = new double[nNewSize];
+
+ boolean status = initRecord();
+ if (status) {
+ log("Ready to go.");
+ startRecordingForReal();
+ } else {
+ log("Recorder initialization error.");
+ synchronized (sRecordingLock) {
+ mIsRecording = false;
+ }
+ }
+
+ return status;
+ }
+
+
+ boolean startBufferRecording() {
+ synchronized (sRecordingLock) {
+ mIsRecording = true;
+ }
+
+ boolean status = initBufferRecord();
+ if (status) {
+ log("Ready to go.");
+ startBufferRecordingForReal();
+ } else {
+ log("Recorder initialization error.");
+ synchronized (sRecordingLock) {
+ mIsRecording = false;
+ }
+ }
+
+ return status;
+ }
+
+
+ void startRecordingForReal() {
+ mLatencyTestPipeShort.flush();
+ mRecorder.startRecording();
+ }
+
+
+ void startBufferRecordingForReal() {
+ mBufferTestPipeShort = new PipeShort(Constant.MAX_SHORTS);
+ mGlitchDetectionThread = new GlitchDetectionThread(mFrequency1, mFrequency2, mSamplingRate,
+ mFFTSamplingSize, mFFTOverlapSamples, mBufferTestDurationInSeconds,
+ mBufferTestWavePlotDurationInSeconds, mBufferTestPipeShort);
+ mGlitchDetectionThread.start();
+ mRecorder.startRecording();
+ }
+
+
+ void stopRecording() {
+ log("stop recording A");
+ synchronized (sRecordingLock) {
+ log("stop recording B");
+ mIsRecording = false;
+ }
+ stopRecordingForReal();
+ }
+
+
+ void stopRecordingForReal() {
+ log("stop recording for real");
+ if (mRecorder != null) {
+ mRecorder.stop();
+ }
+
+ if (mRecorder != null) {
+ mRecorder.release();
+ mRecorder = null;
+ }
+ }
+
+
+ public void run() {
+ // keeps the total time elapsed since the start of the test. Only used in buffer test.
+ long elapsedTimeMs;
+ mIsRunning = true;
+ while (mIsRunning) {
+ boolean isRecording;
+
+ synchronized (sRecordingLock) {
+ isRecording = mIsRecording;
+ }
+
+ if (isRecording && mRecorder != null) {
+ int nSamplesRead;
+ switch (mTestType) {
+ case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+ nSamplesRead = mRecorder.read(mAudioShortArray, 0,
+ mMinRecorderBuffSizeInSamples);
+
+ if (nSamplesRead > 0) {
+ mRecorderBufferPeriodInRecorder.collectBufferPeriod();
+ { // inject the tone that will be looped-back
+ int currentIndex = mSamplesIndex - 100; //offset
+ for (int i = 0; i < nSamplesRead; i++) {
+ if (currentIndex >= 0 && currentIndex < mAudioTone.length) {
+ mAudioShortArray[i] = mAudioTone[currentIndex];
+ }
+ currentIndex++;
+ }
+ }
+
+ mLatencyTestPipeShort.write(mAudioShortArray, 0, nSamplesRead);
+ if (isStillRoomToRecord()) { //record to vector
+ for (int i = 0; i < nSamplesRead; i++) {
+ double value = mAudioShortArray[i];
+ value = value / Short.MAX_VALUE;
+ if (mSamplesIndex < mSamples.length) {
+ mSamples[mSamplesIndex++] = value;
+ }
+
+ }
+ } else {
+ mIsRunning = false;
+ }
+ }
+ break;
+ case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+ if (mIsRequestStop) {
+ endBufferTest();
+ } else {
+ // before we start the test, first adjust sound level
+ if (mIsAdjustingSoundLevel) {
+ nSamplesRead = mRecorder.read(mBufferTestShortArray, 0,
+ mMinRecorderBuffSizeInSamples);
+ if (nSamplesRead > 0) {
+ for (int i = 0; i < nSamplesRead; i++) {
+ double value = mBufferTestShortArray[i];
+ if (mSoundLevelSamplesIndex < mSoundLevelSamples.length) {
+ mSoundLevelSamples[mSoundLevelSamplesIndex++] = value;
+ } else {
+ // adjust the sound level to appropriate level
+ mIsAdjustingSoundLevel = AdjustSoundLevel();
+ mAdjustSoundLevelCount++;
+ mSoundLevelSamplesIndex = 0;
+ if (!mIsAdjustingSoundLevel) {
+ // end of sound level adjustment, notify AudioTrack
+ mAudioThread.setIsAdjustingSoundLevel(false);
+ mStartTimeMs = System.currentTimeMillis();
+ break;
+ }
+ }
+ }
+ }
+ } else {
+ // the end of test is controlled here. Once we've run for the specified
+ // test duration, end the test
+ elapsedTimeMs = System.currentTimeMillis() - mStartTimeMs;
+ if (elapsedTimeMs >= mBufferTestDurationMs) {
+ endBufferTest();
+ } else {
+ nSamplesRead = mRecorder.read(mBufferTestShortArray, 0,
+ mMinRecorderBuffSizeInSamples);
+ if (nSamplesRead > 0) {
+ mRecorderBufferPeriodInRecorder.collectBufferPeriod();
+ mBufferTestPipeShort.write(mBufferTestShortArray, 0,
+ nSamplesRead);
+ }
+ }
+ }
+ }
+ break;
+ }
+ }
+ } //synchronized
+ stopRecording(); //close this
+ }
+
+
+ /** Someone is requesting to stop the test, will stop the test even if the test is not done. */
+ public void requestStop() {
+ switch (mTestType) {
+ case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+ mIsRequestStop = true;
+ break;
+ case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+ mIsRunning = false;
+ break;
+ }
+ }
+
+
+ /** Collect data then clean things up.*/
+ private void endBufferTest() {
+ mIsRunning = false;
+ mAllGlitches = mGlitchDetectionThread.getGlitches();
+ mGlitchingIntervalTooLong = mGlitchDetectionThread.getGlitchingIntervalTooLong();
+ mSamples = mGlitchDetectionThread.getWaveData();
+ endDetecting();
+ }
+
+
+ /** Clean everything up. */
+ public void endDetecting() {
+ mBufferTestPipeShort.flush();
+ mBufferTestPipeShort = null;
+ mGlitchDetectionThread.requestStop();
+ GlitchDetectionThread tempThread = mGlitchDetectionThread;
+ mGlitchDetectionThread = null;
+ try {
+ tempThread.join(Constant.JOIN_WAIT_TIME_MS);
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+
+
+ /**
+ * Adjust the sound level such that the buffer test can run with small noise disturbance.
+ * Return a boolean value to indicate whether or not the sound level has adjusted to an
+ * appropriate level.
+ */
+ private boolean AdjustSoundLevel() {
+ // if after adjusting 20 times, we still cannot get into the volume we want, increase the
+ // limit range, so it's easier to get into the volume we want.
+ if (mAdjustSoundLevelCount != 0 && mAdjustSoundLevelCount % 20 == 0) {
+ mSoundTopLimit += 0.1;
+ mSoundBotLimit -= 0.1;
+ }
+
+ double topThreshold = Short.MAX_VALUE * mSoundTopLimit;
+ double botThreshold = Short.MAX_VALUE * mSoundBotLimit;
+ double currentMax = mSoundLevelSamples[0];
+ int currentVolume = mAudioManager.getStreamVolume(AudioManager.STREAM_MUSIC);
+
+ // since it's a sine wave, we are only checking max positive value
+ for (int i = 1; i < mSoundLevelSamples.length; i++) {
+ if (mSoundLevelSamples[i] > topThreshold) { // once a sample exceed, return
+ // adjust sound level down
+ currentVolume--;
+ mAudioManager.setStreamVolume(AudioManager.STREAM_MUSIC, currentVolume, 0);
+ return true;
+ }
+
+ if (mSoundLevelSamples[i] > currentMax) {
+ currentMax = mSoundLevelSamples[i];
+ }
+ }
+
+ if (currentMax < botThreshold) {
+ // adjust sound level up
+ if (currentVolume < mMaxVolume) {
+ currentVolume++;
+ mAudioManager.setStreamVolume(AudioManager.STREAM_MUSIC,
+ currentVolume, 0);
+ return true;
+ } else {
+ return false;
+ }
+ }
+
+ return false;
+ }
+
+
+ /** Check if there's any room left in mSamples. */
+ public boolean isStillRoomToRecord() {
+ boolean result = false;
+ if (mSamples != null) {
+ if (mSamplesIndex < mSamples.length) {
+ result = true;
+ }
+ }
+
+ return result;
+ }
+
+
+ /**
+     * This function creates the tone that will be injected (and then looped back) in the latency
+     * test. It's a sine wave whose magnitude increases then decreases.
+ */
+ //TODO make this a subclass of ToneGeneration
+ private void createAudioTone(int sampleSize, int frequency, boolean taperEnds) {
+ mAudioTone = new short[sampleSize];
+ double phase = 0;
+
+ for (int i = 0; i < sampleSize; i++) {
+ double factor = 1.0; // decide the magnitude of the sine wave
+ if (taperEnds) {
+ if (i < sampleSize / 2) {
+ factor = 2.0 * i / sampleSize;
+ } else {
+ factor = 2.0 * (sampleSize - i) / sampleSize;
+ }
+ }
+
+ short value = (short) (factor * Math.sin(phase) * 10000);
+ mAudioTone[i] = value;
+ phase += Constant.TWO_PI * frequency / mSamplingRate;
+ }
+
+ while (phase > Constant.TWO_PI)
+ phase -= Constant.TWO_PI;
+ }
+
+
+ public void setBufferTestDurationInSeconds(int bufferTestDurationInSeconds) {
+ mBufferTestDurationInSeconds = bufferTestDurationInSeconds;
+ mBufferTestDurationMs = Constant.MILLIS_PER_SECOND * mBufferTestDurationInSeconds;
+ }
+
+
+ public int[] getAllGlitches() {
+ return mAllGlitches;
+ }
+
+
+ public boolean getGlitchingIntervalTooLong() {
+ return mGlitchingIntervalTooLong;
+ }
+
+
+ public double[] getWaveData() {
+ return mSamples;
+ }
+
+
+ public int getFFTSamplingSize() {
+ return mFFTSamplingSize;
+ }
+
+
+ public int getFFTOverlapSamples() {
+ return mFFTOverlapSamples;
+ }
+
+
+ private static void log(String msg) {
+ Log.v(TAG, msg);
+ }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SettingsActivity.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SettingsActivity.java
index 43c6099..fc26634 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SettingsActivity.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SettingsActivity.java
@@ -18,17 +18,10 @@ package org.drrickorang.loopback;
import android.app.Activity;
import android.content.Intent;
-import android.content.Context;
-import android.content.pm.PackageManager;
-import android.media.AudioFormat;
-import android.media.AudioManager;
-import android.media.AudioRecord;
-import android.media.AudioTrack;
-import android.os.Build;
+import android.content.res.Resources;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
-import android.widget.SeekBar;
import android.widget.Spinner;
import android.widget.ArrayAdapter;
import android.widget.AdapterView.OnItemSelectedListener;
@@ -37,21 +30,26 @@ import android.widget.NumberPicker;
import android.widget.NumberPicker.OnValueChangeListener;
import android.widget.TextView;
+
+/**
+ * This activity displays all settings that can be adjusted by the user.
+ */
+
public class SettingsActivity extends Activity implements OnItemSelectedListener,
-OnValueChangeListener {
- /**
- * Called with the activity is first created.
- */
- Spinner mSpinnerMicSource;
- Spinner mSpinnerSamplingRate;
- Spinner mSpinnerAudioThreadType;
- NumberPicker mNumberPickerPlaybackBuffer;
- NumberPicker mNumberPickerRecordBuffer;
+ OnValueChangeListener {
+ private static final String TAG = "SettingsActivity";
- TextView mTextSettingsInfo;
+ private Spinner mSpinnerMicSource;
+ private Spinner mSpinnerSamplingRate;
+ private Spinner mSpinnerAudioThreadType;
+ private NumberPicker mNumberPickerPlayerBuffer;
+ private NumberPicker mNumberPickerRecorderBuffer;
+ private NumberPicker mNumberPickerBufferTestDuration; // in seconds
+ private NumberPicker mNumberPickerBufferTestWavePlotDuration; //in seconds
+ private TextView mTextSettingsInfo;
+
+ ArrayAdapter<CharSequence> mAdapterSamplingRate;
- ArrayAdapter<CharSequence> adapterSamplingRate;
- int bytesPerFrame;
@Override
public void onCreate(Bundle savedInstanceState) {
@@ -59,11 +57,8 @@ OnValueChangeListener {
// Set the layout for this activity. You can find it
View view = getLayoutInflater().inflate(R.layout.settings_activity, null);
setContentView(view);
-
-
mTextSettingsInfo = (TextView) findViewById(R.id.textSettingsInfo);
-
int micSource = getApp().getMicSource();
mSpinnerMicSource = (Spinner) findViewById(R.id.spinnerMicSource);
ArrayAdapter<CharSequence> adapterMicSource = ArrayAdapter.createFromResource(this,
@@ -73,26 +68,22 @@ OnValueChangeListener {
// Apply the adapter to the spinner
mSpinnerMicSource.setAdapter(adapterMicSource);
//set current value
-// String currentValue = String.valueOf(samplingRate);
-// int nPosition = adapter.getPosition(currentValue);
mSpinnerMicSource.setSelection(micSource, false);
mSpinnerMicSource.setOnItemSelectedListener(this);
-
- bytesPerFrame = getApp().BYTES_PER_FRAME;
int samplingRate = getApp().getSamplingRate();
//init spinner, etc
mSpinnerSamplingRate = (Spinner) findViewById(R.id.spinnerSamplingRate);
- adapterSamplingRate = ArrayAdapter.createFromResource(this,
+ mAdapterSamplingRate = ArrayAdapter.createFromResource(this,
R.array.samplingRate_array, android.R.layout.simple_spinner_item);
// Specify the layout to use when the list of choices appears
- adapterSamplingRate.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
+ mAdapterSamplingRate.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
// Apply the adapter to the spinner
- mSpinnerSamplingRate.setAdapter(adapterSamplingRate);
+ mSpinnerSamplingRate.setAdapter(mAdapterSamplingRate);
//set current value
String currentValue = String.valueOf(samplingRate);
- int nPosition = adapterSamplingRate.getPosition(currentValue);
- mSpinnerSamplingRate.setSelection(nPosition,false);
+ int nPosition = mAdapterSamplingRate.getPosition(currentValue);
+ mSpinnerSamplingRate.setSelection(nPosition, false);
mSpinnerSamplingRate.setOnItemSelectedListener(this);
//spinner native
@@ -105,137 +96,209 @@ OnValueChangeListener {
// Apply the adapter to the spinner
mSpinnerAudioThreadType.setAdapter(adapter2);
//set current value
-// String currentValue = String.valueOf(samplingRate);
-// int nPosition = adapter.getPosition(currentValue);
mSpinnerAudioThreadType.setSelection(audioThreadType, false);
if (!getApp().isSafeToUseSles())
mSpinnerAudioThreadType.setEnabled(false);
mSpinnerAudioThreadType.setOnItemSelectedListener(this);
- //playback buffer
- mNumberPickerPlaybackBuffer = (NumberPicker) findViewById(R.id.numberpickerPlaybackBuffer);
- mNumberPickerPlaybackBuffer.setMaxValue(8000);
- mNumberPickerPlaybackBuffer.setMinValue(16);
- mNumberPickerPlaybackBuffer.setWrapSelectorWheel(false);
- mNumberPickerPlaybackBuffer.setOnValueChangedListener(this);
- int playbackBuffer = getApp().getPlayBufferSizeInBytes()/bytesPerFrame;
- mNumberPickerPlaybackBuffer.setValue(playbackBuffer);
- log("playbackbuffer = " + playbackBuffer);
+
+ // buffer test duration in seconds
+ int bufferTestDurationMax = 36000;
+ int bufferTestDurationMin = 1;
+ mNumberPickerBufferTestDuration = (NumberPicker)
+ findViewById(R.id.numberpickerBufferTestDuration);
+ mNumberPickerBufferTestDuration.setMaxValue(bufferTestDurationMax);
+ mNumberPickerBufferTestDuration.setMinValue(bufferTestDurationMin);
+ mNumberPickerBufferTestDuration.setWrapSelectorWheel(false);
+ mNumberPickerBufferTestDuration.setOnValueChangedListener(this);
+ int bufferTestDuration = getApp().getBufferTestDuration();
+ mNumberPickerBufferTestDuration.setValue(bufferTestDuration);
+
+ // set the string to display bufferTestDurationMax
+ Resources res = getResources();
+ String string1 = res.getString(R.string.labelBufferTestDuration, bufferTestDurationMax);
+ TextView textView = (TextView) findViewById(R.id.textBufferTestDuration);
+ textView.setText(string1);
+
+ // wave plot duration for buffer test in seconds
+ int bufferTestWavePlotDurationMax = 120;
+ int bufferTestWavePlotDurationMin = 1;
+ mNumberPickerBufferTestWavePlotDuration = (NumberPicker)
+ findViewById(R.id.numberPickerBufferTestWavePlotDuration);
+ mNumberPickerBufferTestWavePlotDuration.setMaxValue(bufferTestWavePlotDurationMax);
+ mNumberPickerBufferTestWavePlotDuration.setMinValue(bufferTestWavePlotDurationMin);
+ mNumberPickerBufferTestWavePlotDuration.setWrapSelectorWheel(false);
+ mNumberPickerBufferTestWavePlotDuration.setOnValueChangedListener(this);
+ int bufferTestWavePlotDuration = getApp().getBufferTestWavePlotDuration();
+ mNumberPickerBufferTestWavePlotDuration.setValue(bufferTestWavePlotDuration);
+
+ // set the string to display bufferTestWavePlotDurationMax
+ string1 = res.getString(R.string.labelBufferTestWavePlotDuration,
+ bufferTestWavePlotDurationMax);
+ textView = (TextView) findViewById(R.id.textBufferTestWavePlotDuration);
+ textView.setText(string1);
+
+ //player buffer
+ int playerBufferMax = 8000;
+ int playerBufferMin = 16;
+ mNumberPickerPlayerBuffer = (NumberPicker) findViewById(R.id.numberpickerPlayerBuffer);
+ mNumberPickerPlayerBuffer.setMaxValue(playerBufferMax);
+ mNumberPickerPlayerBuffer.setMinValue(playerBufferMin);
+ mNumberPickerPlayerBuffer.setWrapSelectorWheel(false);
+ mNumberPickerPlayerBuffer.setOnValueChangedListener(this);
+ int playerBuffer = getApp().getPlayerBufferSizeInBytes()/ Constant.BYTES_PER_FRAME;
+ mNumberPickerPlayerBuffer.setValue(playerBuffer);
+ log("playerbuffer = " + playerBuffer);
+
+ // set the string to display playerBufferMax
+ string1 = res.getString(R.string.labelPlayerBuffer, playerBufferMax);
+ textView = (TextView) findViewById(R.id.textPlayerBuffer);
+ textView.setText(string1);
+
//record buffer
- mNumberPickerRecordBuffer = (NumberPicker) findViewById(R.id.numberpickerRecordBuffer);
- mNumberPickerRecordBuffer.setMaxValue(8000);
- mNumberPickerRecordBuffer.setMinValue(16);
- mNumberPickerRecordBuffer.setWrapSelectorWheel(false);
- mNumberPickerRecordBuffer.setOnValueChangedListener(this);
- int recordBuffer = getApp().getRecordBufferSizeInBytes()/bytesPerFrame;
- mNumberPickerRecordBuffer.setValue(recordBuffer);
- log("recordBuffer = " + recordBuffer);
+ int recorderBufferMax = 8000;
+ int recorderBufferMin = 16;
+ mNumberPickerRecorderBuffer = (NumberPicker) findViewById(R.id.numberpickerRecorderBuffer);
+ mNumberPickerRecorderBuffer.setMaxValue(recorderBufferMax);
+ mNumberPickerRecorderBuffer.setMinValue(recorderBufferMin);
+ mNumberPickerRecorderBuffer.setWrapSelectorWheel(false);
+ mNumberPickerRecorderBuffer.setOnValueChangedListener(this);
+ int recorderBuffer = getApp().getRecorderBufferSizeInBytes()/ Constant.BYTES_PER_FRAME;
+ mNumberPickerRecorderBuffer.setValue(recorderBuffer);
+ log("recorderBuffer = " + recorderBuffer);
+
+ // set the string to display playerBufferMax
+ string1 = res.getString(R.string.labelRecorderBuffer, recorderBufferMax);
+ textView = (TextView) findViewById(R.id.textRecorderBuffer);
+ textView.setText(string1);
+
refresh();
}
+
public void onDestroy() {
super.onDestroy();
}
+
+
@Override
public void onBackPressed() {
-
log("on back pressed");
settingsChanged();
finish();
}
+
+
private void refresh() {
- int playbackBuffer = getApp().getPlayBufferSizeInBytes()/bytesPerFrame;
- mNumberPickerPlaybackBuffer.setValue(playbackBuffer);
- int recordBuffer = getApp().getRecordBufferSizeInBytes()/bytesPerFrame;
- mNumberPickerRecordBuffer.setValue(recordBuffer);
- if (getApp().getAudioThreadType() == LoopbackApplication.AUDIO_THREAD_TYPE_JAVA)
- mNumberPickerRecordBuffer.setEnabled(true);
- else
- mNumberPickerRecordBuffer.setEnabled(false);
+ int bufferTestDuration = getApp().getBufferTestDuration();
+ mNumberPickerBufferTestDuration.setValue(bufferTestDuration);
+ int bufferTestWavePlotDuration = getApp().getBufferTestWavePlotDuration();
+ mNumberPickerBufferTestWavePlotDuration.setValue(bufferTestWavePlotDuration);
- int samplingRate = getApp().getSamplingRate();
+ int playerBuffer = getApp().getPlayerBufferSizeInBytes() / Constant.BYTES_PER_FRAME;
+ mNumberPickerPlayerBuffer.setValue(playerBuffer);
+ int recorderBuffer = getApp().getRecorderBufferSizeInBytes() / Constant.BYTES_PER_FRAME;
+ mNumberPickerRecorderBuffer.setValue(recorderBuffer);
+
+ if (getApp().getAudioThreadType() == Constant.AUDIO_THREAD_TYPE_JAVA) {
+ mNumberPickerRecorderBuffer.setEnabled(true);
+ } else {
+ mNumberPickerRecorderBuffer.setEnabled(false);
+ }
+ int samplingRate = getApp().getSamplingRate();
String currentValue = String.valueOf(samplingRate);
- int nPosition = adapterSamplingRate.getPosition(currentValue);
+ int nPosition = mAdapterSamplingRate.getPosition(currentValue);
mSpinnerSamplingRate.setSelection(nPosition);
-
-// try {
-// int versionCode = getApplicationContext().getPackageManager().getPackageInfo(getApplicationContext().getPackageName(), 0).versionCode;
-// String versionName = getApplicationContext().getPackageManager().getPackageInfo(getApplicationContext().getPackageName(), 0).versionName;
-// mTextSettingsInfo.setText("SETTINGS - Ver. " +versionCode +"."+ versionName + " | " +Build.MODEL + " | " + Build.FINGERPRINT);
-// } catch (PackageManager.NameNotFoundException e) {
-// e.printStackTrace();
-// }
String info = getApp().getSystemInfo();
- mTextSettingsInfo.setText(String.format("SETTINGS - "+info));
-
+ mTextSettingsInfo.setText("SETTINGS - " + info);
}
- public void onItemSelected(AdapterView<?> parent, View view,
- int pos, long id) {
+
+
+ public void onItemSelected(AdapterView<?> parent, View view, int pos, long id) {
// An item was selected. You can retrieve the selected item using
// parent.getItemAtPosition(pos)
log("item selected!");
- switch(parent.getId()) {
- case R.id.spinnerSamplingRate:
- String stringValue = mSpinnerSamplingRate.getSelectedItem().toString();
- int samplingRate = Integer.parseInt(stringValue);
- getApp().setSamplingRate(samplingRate);
- settingsChanged();
- log("Sampling Rate: "+ stringValue);
- break;
- case R.id.spinnerAudioThreadType:
- int audioThreadType = mSpinnerAudioThreadType.getSelectedItemPosition();
- getApp().setAudioThreadType(audioThreadType);
- getApp().computeDefaults();
- settingsChanged();
- log("AudioThreadType:" + audioThreadType);
- refresh();
- break;
- case R.id.spinnerMicSource:
- int micSource = mSpinnerMicSource.getSelectedItemPosition();
- getApp().setMicSource(micSource);
- settingsChanged();
- log("mic Source:" + micSource);
- refresh();
- break;
+
+ switch (parent.getId()) {
+ case R.id.spinnerSamplingRate:
+ String stringValue = mSpinnerSamplingRate.getSelectedItem().toString();
+ int samplingRate = Integer.parseInt(stringValue);
+ getApp().setSamplingRate(samplingRate);
+ settingsChanged();
+ log("Sampling Rate: " + stringValue);
+ refresh();
+ break;
+ case R.id.spinnerAudioThreadType:
+ int audioThreadType = mSpinnerAudioThreadType.getSelectedItemPosition();
+ getApp().setAudioThreadType(audioThreadType);
+ getApp().computeDefaults();
+ settingsChanged();
+ log("AudioThreadType:" + audioThreadType);
+ refresh();
+ break;
+ case R.id.spinnerMicSource:
+ int micSource = mSpinnerMicSource.getSelectedItemPosition();
+ getApp().setMicSource(micSource);
+ settingsChanged();
+ log("mic Source:" + micSource);
+ refresh();
+ break;
}
}
- public void onValueChange (NumberPicker picker, int oldVal, int newVal) {
- if (picker == mNumberPickerPlaybackBuffer) {
- log("playback new size " + oldVal + " -> " + newVal);
- getApp().setPlayBufferSizeInBytes(newVal*bytesPerFrame);
- } else if (picker == mNumberPickerRecordBuffer) {
- log("record new size " + oldVal + " -> " + newVal);
- getApp().setRecordBufferSizeInBytes(newVal*bytesPerFrame);
+
+ public void onValueChange(NumberPicker picker, int oldVal, int newVal) {
+ if (picker == mNumberPickerPlayerBuffer) {
+ log("player buffer new size " + oldVal + " -> " + newVal);
+ getApp().setPlayerBufferSizeInBytes(newVal * Constant.BYTES_PER_FRAME);
+ int audioThreadType = mSpinnerAudioThreadType.getSelectedItemPosition();
+ // in native mode, recorder buffer size = player buffer size
+ if (audioThreadType == Constant.AUDIO_THREAD_TYPE_NATIVE){
+ getApp().setRecorderBufferSizeInBytes(newVal * Constant.BYTES_PER_FRAME);
+ }
+ } else if (picker == mNumberPickerRecorderBuffer) {
+ log("recorder buffer new size " + oldVal + " -> " + newVal);
+ getApp().setRecorderBufferSizeInBytes(newVal * Constant.BYTES_PER_FRAME);
+ } else if (picker == mNumberPickerBufferTestDuration) {
+ log("buffer test new duration: " + oldVal + " -> " + newVal);
+ getApp().setBufferTestDuration(newVal);
+ } else if (picker == mNumberPickerBufferTestWavePlotDuration) {
+ log("buffer test's wave plot new duration: " + oldVal + " -> " + newVal);
+ getApp().setBufferTestWavePlotDuration(newVal);
}
+ settingsChanged();
+ refresh();
}
+
private void settingsChanged() {
Intent intent = new Intent();
setResult(RESULT_OK, intent);
}
+
public void onNothingSelected(AdapterView<?> parent) {
// Another interface callback
}
+
/** Called when the user clicks the button */
public void onButtonClick(View view) {
- //refresh();
getApp().computeDefaults();
refresh();
}
+// Below is work in progress by Ricardo
// public void onButtonRecordDefault(View view) {
// int samplingRate = getApp().getSamplingRate();
//
-// int minRecBufferSizeInBytes = AudioRecord.getMinBufferSize(samplingRate,
+// int minRecorderBufferSizeInBytes = AudioRecord.getMinBufferSize(samplingRate,
// AudioFormat.CHANNEL_IN_MONO,
// AudioFormat.ENCODING_PCM_16BIT);
-// getApp().setRecordBufferSizeInBytes(minRecBufferSizeInBytes);
+// getApp().setRecorderBufferSizeInBytes(minRecorderBufferSizeInBytes);
//
// refresh();
// }
@@ -243,33 +306,35 @@ OnValueChangeListener {
// private void computeDefaults() {
//
//// if (getApp().getAudioThreadType() == LoopbackApplication.AUDIO_THREAD_TYPE_JAVA) {
-//// mNumberPickerRecordBuffer.setEnabled(true);
+//// mNumberPickerRecorderBuffer.setEnabled(true);
//// else
-//// mNumberPickerRecordBuffer.setEnabled(false);
+//// mNumberPickerRecorderBuffer.setEnabled(false);
//
// int samplingRate = AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_MUSIC);
// getApp().setSamplingRate(samplingRate);
-// int minPlayBufferSizeInBytes = AudioTrack.getMinBufferSize(samplingRate,
+// int minPlayerBufferSizeInBytes = AudioTrack.getMinBufferSize(samplingRate,
// AudioFormat.CHANNEL_OUT_MONO,
// AudioFormat.ENCODING_PCM_16BIT);
-// getApp().setPlayBufferSizeInBytes(minPlayBufferSizeInBytes);
+// getApp().setPlayerBufferSizeInBytes(minPlayerBufferSizeInBytes);
//
-// int minRecBufferSizeInBytes = AudioRecord.getMinBufferSize(samplingRate,
+// int minRecorderBufferSizeInBytes = AudioRecord.getMinBufferSize(samplingRate,
// AudioFormat.CHANNEL_IN_MONO,
// AudioFormat.ENCODING_PCM_16BIT);
-// getApp().setRecordBufferSizeInBytes(minRecBufferSizeInBytes);
-// getApp().setRecordBufferSizeInBytes(minRecBufferSizeInBytes);
+// getApp().setRecorderBufferSizeInBytes(minRecorderBufferSizeInBytes);
+// getApp().setRecorderBufferSizeInBytes(minRecorderBufferSizeInBytes);
//
// log("computed defaults");
//
// }
+
private LoopbackApplication getApp() {
return (LoopbackApplication) this.getApplication();
}
+
private static void log(String msg) {
- Log.v("Settings", msg);
+ Log.v(TAG, msg);
}
}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SineWaveTone.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SineWaveTone.java
new file mode 100644
index 0000000..a0b7fd9
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SineWaveTone.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+
+/**
+ * This class generates a sine wave with given frequency and samplingRate.
+ * It keeps a member variable "mPhase", so that as it is continually called, it will continue
+ * to generate the next section of the sine wave.
+ */
+
+public class SineWaveTone extends ToneGeneration {
+ private int mCount; // counts the total samples produced.
+ private double mPhase; // current phase
+ private double mAmplitude; // this value should be from 0 to 1.0
+ private final double mPhaseIncrement; // phase incrementation associated with mFrequency
+
+
+ public SineWaveTone(int samplingRate, double frequency) {
+ super(samplingRate);
+ mCount = 0;
+ mPhaseIncrement = Constant.TWO_PI * (frequency / mSamplingRate); // should < 2pi
+ mAmplitude = Constant.SINE_WAVE_AMPLITUDE;
+ }
+
+
+ @Override
+ public void generateTone(short[] tone, int size) {
+ for (int i = 0; i < size; i++) {
+ short value1 = (short) (mAmplitude * Math.sin(mPhase) * Short.MAX_VALUE);
+ tone[i] = value1;
+
+ mPhase += mPhaseIncrement;
+            // insert a glitch once every second if mIsGlitchEnabled == true
+ if (mIsGlitchEnabled & (mCount % mSamplingRate == 0)) {
+ mPhase += mPhaseIncrement;
+ }
+
+ mCount++;
+
+ if (mPhase >= Constant.TWO_PI) {
+ mPhase -= Constant.TWO_PI;
+ }
+ }
+ }
+
+
+ @Override
+ public void generateTone(double[] tone, int size) {
+ for (int i = 0; i < size; i++) {
+ double value1 = mAmplitude * Math.sin(mPhase);
+ tone[i] = value1;
+
+ mPhase += mPhaseIncrement;
+            // insert a glitch once every second if mIsGlitchEnabled == true
+ if (mIsGlitchEnabled & (mCount % mSamplingRate == 0)) {
+ mPhase += mPhaseIncrement;
+ }
+
+ mCount++;
+
+ if (mPhase >= Constant.TWO_PI) {
+ mPhase -= Constant.TWO_PI;
+ }
+ }
+ }
+
+
+ @Override
+ public void resetPhases() {
+ mPhase = 0;
+ }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/ToneGeneration.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/ToneGeneration.java
new file mode 100644
index 0000000..0fde60a
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/ToneGeneration.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+
+/**
+ * This class is used to generate different kinds of tones.
+ */
+
+public abstract class ToneGeneration {
+ protected int mSamplingRate;
+ protected boolean mIsGlitchEnabled = false; // indicates we are inserting glitches or not
+
+
+ public ToneGeneration(int samplingRate) {
+ mSamplingRate = samplingRate;
+ }
+
+
+    /** Store samples into "tone". Values of the samples range from -32768 to 32767. */
+ public abstract void generateTone(short[] tone, int size);
+
+
+ /**
+     * Store samples into "tone". Values of the samples range from -1.0 to 1.0.
+     * This function is not supposed to be used to create a tone that is going to be passed
+     * into AudioTrack.write(), as that method only takes in floats.
+ */
+ public abstract void generateTone(double[] tone, int size);
+
+
+ /** Reset all the phases to zero. */
+ public abstract void resetPhases();
+
+
+ /**
+     * Set the value of mIsGlitchEnabled. If mIsGlitchEnabled == true, glitches will be
+     * inserted into the generated tone.
+ */
+ public void setGlitchEnabled(boolean isGlitchEnabled) {
+ mIsGlitchEnabled = isGlitchEnabled;
+ }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/TwoSineWavesTone.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/TwoSineWavesTone.java
new file mode 100644
index 0000000..35874b4
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/TwoSineWavesTone.java
@@ -0,0 +1,110 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+
+/**
+ * This class generates a mix of two sine waves with frequency1, frequency2, and samplingRate.
+ * It keeps two member variables, "mPhase1" and "mPhase2", so that as it is continually called,
+ * it will continue to generate the next section of the sine wave.
+ */
+
+public class TwoSineWavesTone extends ToneGeneration {
+ private int mCount; // counts the total samples produced.
+ private double mPhase1; // current phase associated with mFrequency1
+ private double mPhase2; // current phase associated with mFrequency2
+ private double mAmplitude; // this value should be from 0 to 1.0
+ private final double mPhaseIncrement1; // phase incrementation associated with mFrequency1
+ private final double mPhaseIncrement2; // phase incrementation associated with mFrequency2
+
+
+ /**
+ * Currently, this class is never used, but it can be used in the future to create a different
+ * kind of wave when running the test.
+ */
+ public TwoSineWavesTone(int samplingRate, double frequency1, double frequency2) {
+ super(samplingRate);
+ mCount = 0;
+ mPhaseIncrement1 = Constant.TWO_PI * (frequency1 / mSamplingRate); // should < 2pi
+ mPhaseIncrement2 = Constant.TWO_PI * (frequency2 / mSamplingRate); // should < 2pi
+ mAmplitude = Constant.TWO_SINE_WAVES_AMPLITUDE;
+ }
+
+
+ @Override
+ public void generateTone(short[] tone, int size) {
+ for (int i = 0; i < size; i++) {
+ short value1 = (short) (mAmplitude * Math.sin(mPhase1) * Short.MAX_VALUE);
+ short value2 = (short) (mAmplitude * Math.sin(mPhase2) * Short.MAX_VALUE);
+ tone[i] = (short) (value1 + value2);
+
+ mPhase1 += mPhaseIncrement1;
+ mPhase2 += mPhaseIncrement2;
+
+ // insert glitches for every second if mIsGlitchEnabled == true.
+ if (mIsGlitchEnabled & (mCount % mSamplingRate == 0)) {
+ mPhase1 += mPhaseIncrement1;
+ mPhase2 += mPhaseIncrement2;
+ }
+
+ mCount++;
+
+ if (mPhase1 > Constant.TWO_PI) {
+ mPhase1 -= Constant.TWO_PI;
+ }
+ if (mPhase2 > Constant.TWO_PI) {
+ mPhase2 -= Constant.TWO_PI;
+ }
+
+ }
+ }
+
+
+ @Override
+ public void generateTone(double[] tone, int size) {
+ for (int i = 0; i < size; i++) {
+ double value1 = mAmplitude * Math.sin(mPhase1);
+ double value2 = mAmplitude * Math.sin(mPhase2);
+ tone[i] = value1 + value2;
+
+ mPhase1 += mPhaseIncrement1;
+ mPhase2 += mPhaseIncrement2;
+            // insert a glitch once every second if mIsGlitchEnabled == true
+ if (mIsGlitchEnabled & (mCount % mSamplingRate == 0)) {
+ mPhase1 += mPhaseIncrement1;
+ mPhase2 += mPhaseIncrement2;
+ }
+
+ mCount++;
+
+ if (mPhase1 > Constant.TWO_PI) {
+ mPhase1 -= Constant.TWO_PI;
+ }
+ if (mPhase2 > Constant.TWO_PI) {
+ mPhase2 -= Constant.TWO_PI;
+ }
+
+ }
+ }
+
+
+ @Override
+ public void resetPhases() {
+ mPhase1 = 0;
+ mPhase2 = 0;
+ }
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Utilities.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Utilities.java
new file mode 100644
index 0000000..2d74b29
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Utilities.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+
+/**
+ * This class contains functions that can be reused in different classes.
+ */
+
+public class Utilities {
+
+
+    /** Multiply the input array by a Hanning window. */
+ public static double[] hanningWindow(double[] samples) {
+ int length = samples.length;
+ final double alpha = 0.5;
+ final double beta = 0.5;
+ double coefficient;
+ for (int i = 0; i < length; i++) {
+ coefficient = (Constant.TWO_PI * i) / (length - 1);
+ samples[i] *= alpha - beta * Math.cos(coefficient);
+ }
+
+ return samples;
+ }
+
+
+ /** Round up to the nearest power of 2. */
+ public static int roundup(int size)
+ {
+ // Integer.numberOfLeadingZeros() returns 32 for zero input
+ if (size == 0) {
+ size = 1;
+ }
+
+ int lz = Integer.numberOfLeadingZeros(size);
+ int rounded = 0x80000000 >>> lz;
+ // 0x800000001 and higher are actually rounded _down_ to prevent overflow
+ if (size > rounded && lz > 0) {
+ rounded <<= 1;
+ }
+ return rounded;
+ }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/WavePlotView.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/WavePlotView.java
index bb91b31..eaf068a 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/WavePlotView.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/WavePlotView.java
@@ -16,83 +16,150 @@
package org.drrickorang.loopback;
-import android.view.View;
+import java.util.Arrays;
+
import android.content.Context;
-import android.util.AttributeSet;
-import android.util.Log;
import android.graphics.Canvas;
import android.graphics.Paint;
+import android.graphics.Path;
+import android.graphics.Paint.Style;
+import android.util.AttributeSet;
+import android.util.Log;
import android.view.GestureDetector;
-//import android.view.GestureDetector.OnGestureListener;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;
-//import android.view.ScaleGestureDetector.SimpleOnScaleGestureListener;
-import java.util.Arrays;
-import android.graphics.Path;
-import android.graphics.Paint.Style;
+import android.view.View;
+
+/**
+ * This view is the wave plot shown on the main activity.
+ */
public class WavePlotView extends View {
- private static final String TAG = "WavePlot";
+ private static final String TAG = "WavePlotView";
- private double [] bigDataArray;
- private double [] valuesArray; //top points to plot
- private double [] valuesArray2; //bottom
+ private double [] mBigDataArray;
+ private double [] mValuesArray; //top points to plot
+ private double [] mValuesArray2; //bottom
- private double [] insetArray;
- private double [] insetArray2;
- private int mInsetSize=20;
+ private double [] mInsetArray;
+ private double [] mInsetArray2;
+ private int mInsetSize = 20;
private double mZoomFactorX = 1.0; //1:1 1 sample / point . Note: Point != pixel.
- private int mCurrentOffset = 0;
- private int mArraySize = 100; //default size
+ private int mCurrentOffset = 0;
+ private int mArraySize = 100; //default size
+ private int mSamplingRate;
- public int mSamplingRate = 48000;
-
- private GestureDetector mDetector;
- private ScaleGestureDetector mSGDetector;
+ private GestureDetector mDetector;
+ private ScaleGestureDetector mSGDetector;
private MyScaleGestureListener mSGDListener;
private int mWidth;
private int mHeight;
+ private Paint mMyPaint;
+ private Paint mPaintZoomBox;
+ private Paint mPaintInsetBackground;
+ private Paint mPaintInsetBorder;
+ private Paint mPaintInset;
+ private Paint mPaintGrid;
+ private Paint mPaintGridText;
+
public WavePlotView(Context context, AttributeSet attrs) {
super(context, attrs);
mSGDListener = new MyScaleGestureListener();
mDetector = new GestureDetector(context, new MyGestureListener());
mSGDetector = new ScaleGestureDetector(context, mSGDListener);
+ initPaints();
+ }
+
+
+ /** Initiate all the Paint objects. */
+ private void initPaints() {
+ final int COLOR_WAVE = 0xFF1E4A99;
+ final int COLOR_ZOOM_BOX = 0X50E0E619;
+ final int COLOR_INSET_BACKGROUND = 0xFFFFFFFF;
+ final int COLOR_INSET_BORDER = 0xFF002260;
+ final int COLOR_INSET_WAVE = 0xFF910000;
+ final int COLOR_GRID = 0x7F002260;
+ final int COLOR_GRID_TEXT = 0xFF002260;
+
+ mMyPaint = new Paint();
+ mMyPaint.setColor(COLOR_WAVE);
+ mMyPaint.setAntiAlias(true);
+ mMyPaint.setStyle(Style.FILL_AND_STROKE);
+ mMyPaint.setStrokeWidth(1);
+
+ mPaintZoomBox = new Paint();
+ mPaintZoomBox.setColor(COLOR_ZOOM_BOX);
+ mPaintZoomBox.setAntiAlias(true);
+ mPaintZoomBox.setStyle(Style.FILL);
+
+ mPaintInsetBackground = new Paint();
+ mPaintInsetBackground.setColor(COLOR_INSET_BACKGROUND);
+ mPaintInsetBackground.setAntiAlias(true);
+ mPaintInsetBackground.setStyle(Style.FILL);
+
+ mPaintInsetBorder = new Paint();
+ mPaintInsetBorder.setColor(COLOR_INSET_BORDER);
+ mPaintInsetBorder.setAntiAlias(true);
+ mPaintInsetBorder.setStyle(Style.STROKE);
+ mPaintInsetBorder.setStrokeWidth(1);
+
+ mPaintInset = new Paint();
+ mPaintInset.setColor(COLOR_INSET_WAVE);
+ mPaintInset.setAntiAlias(true);
+ mPaintInset.setStyle(Style.FILL_AND_STROKE);
+ mPaintInset.setStrokeWidth(1);
+
+ final int textSize = 25;
+ mPaintGrid = new Paint(Paint.ANTI_ALIAS_FLAG);
+ mPaintGrid.setColor(COLOR_GRID); //gray
+ mPaintGrid.setTextSize(textSize);
+
+ mPaintGridText = new Paint(Paint.ANTI_ALIAS_FLAG);
+ mPaintGridText.setColor(COLOR_GRID_TEXT); //BLACKgray
+ mPaintGridText.setTextSize(textSize);
}
+
+ /** Must call this function to set mSamplingRate before plotting the wave. */
public void setSamplingRate(int samplingRate) {
mSamplingRate = samplingRate;
}
+
public double getZoom() {
return mZoomFactorX;
}
- //returns max zoom out value (>1.0)
+
+    /** Return max zoom out value (> 1.0). */
public double getMaxZoomOut() {
double maxZoom = 1.0;
- if (bigDataArray != null) {
- int n = bigDataArray.length;
+ if (mBigDataArray != null) {
+ int n = mBigDataArray.length;
maxZoom = ((double) n) / mArraySize;
}
+
return maxZoom;
}
+
+
public double getMinZoomOut() {
double minZoom = 1.0;
-
return minZoom;
}
+
public int getOffset() {
return mCurrentOffset;
}
- public void setZoom(double zoom) {
+ public void setZoom(double zoom) {
double newZoom = zoom;
double maxZoom = getMaxZoomOut();
double minZoom = getMinZoomOut();
@@ -103,195 +170,156 @@ public class WavePlotView extends View {
if (newZoom > maxZoom)
newZoom = maxZoom;
+
mZoomFactorX = newZoom;
//fix offset if this is the case
- setOffset(0,true); //just touch offset in case it needs to be fixed.
+ setOffset(0, true); //just touch offset in case it needs to be fixed.
}
- public void setOffset(int sampleOffset, boolean relative) {
+ public void setOffset(int sampleOffset, boolean relative) {
int newOffset = sampleOffset;
+
if (relative) {
newOffset = mCurrentOffset + sampleOffset;
}
- if (bigDataArray != null) {
- int n = bigDataArray.length;
+ if (mBigDataArray != null) {
+ int n = mBigDataArray.length;
//update offset if last sample is more than expected
int lastSample = newOffset + (int)getWindowSamples();
if (lastSample >= n) {
- int delta = lastSample-n;
- newOffset -= lastSample-n;
+ int delta = lastSample - n;
+ newOffset -= delta;
}
- if (newOffset <0)
+ if (newOffset < 0)
newOffset = 0;
- if (newOffset>= n)
- newOffset = n-1;
+ if (newOffset >= n)
+ newOffset = n - 1;
mCurrentOffset = newOffset;
}
}
+
public double getWindowSamples() {
//samples in current window
double samples = 0;
-
- if (bigDataArray != null) {
+ if (mBigDataArray != null) {
double zoomFactor = getZoom();
- samples = mArraySize * zoomFactor;;
+ samples = mArraySize * zoomFactor;
}
+
return samples;
}
+
public void refreshGraph() {
computeViewArray(mZoomFactorX, mCurrentOffset);
}
+
@Override
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
mWidth = w;
mHeight = h;
- log("New w: " + mWidth + " h: "+ mHeight);
+ log("New w: " + mWidth + " h: " + mHeight);
initView();
}
+
+
private void initView() {
//re init graphical elements
mArraySize = mWidth;
- mInsetSize = mWidth/5;
- valuesArray = new double[mArraySize];
- valuesArray2 = new double[mArraySize];
+ mInsetSize = mWidth / 5;
+ mValuesArray = new double[mArraySize];
+ mValuesArray2 = new double[mArraySize];
int i;
- for (i=0; i<mArraySize; i++) {
- valuesArray[i] = 0;
- valuesArray2[i] = 0;
+
+ for (i = 0; i < mArraySize; i++) {
+ mValuesArray[i] = 0;
+ mValuesArray2[i] = 0;
}
+
//inset
- insetArray = new double[mInsetSize];
- insetArray2 = new double[mInsetSize];
- Arrays.fill(insetArray, (double) 0);
- Arrays.fill(insetArray2, (double) 0);
+ mInsetArray = new double[mInsetSize];
+ mInsetArray2 = new double[mInsetSize];
+ Arrays.fill(mInsetArray, (double) 0);
+ Arrays.fill(mInsetArray2, (double) 0);
}
+
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
boolean showZoomBox = mSGDListener.mIsScaling;
boolean showGrid = true;
boolean showInset = true;
- final int COLOR_WAVE = 0xFF1E4A99;
- final int COLOR_ZOOM_BOX = 0X50E0E619;
- final int COLOR_INSET_BACKGROUND = 0xFFFFFFFF;
- final int COLOR_INSET_BORDER = 0xFF002260;
- final int COLOR_INSET_WAVE = 0xFF910000;
- final int COLOR_GRID = 0x7F002260;
- final int COLOR_GRID_TEXT = 0xFF002260;
-
- Paint myPaint = new Paint();
- myPaint.setColor(COLOR_WAVE);
- myPaint.setAntiAlias(true);
- myPaint.setStyle(Style.FILL_AND_STROKE);
- myPaint.setStrokeWidth(1);
-
- Paint paintZoomBox = new Paint();
- paintZoomBox.setColor(COLOR_ZOOM_BOX);
- paintZoomBox.setAntiAlias(true);
- paintZoomBox.setStyle(Style.FILL);
-
- Paint paintInsetBackground = new Paint();
- paintInsetBackground.setColor(COLOR_INSET_BACKGROUND);
- paintInsetBackground.setAntiAlias(true);
- paintInsetBackground.setStyle(Style.FILL);
-
- Paint paintInsetBorder = new Paint();
- paintInsetBorder.setColor(COLOR_INSET_BORDER);
- paintInsetBorder.setAntiAlias(true);
- paintInsetBorder.setStyle(Style.STROKE);
- paintInsetBorder.setStrokeWidth(1);
-
- Paint paintInset = new Paint();
- paintInset.setColor(COLOR_INSET_WAVE);
- paintInset.setAntiAlias(true);
- paintInset.setStyle(Style.FILL_AND_STROKE);
- paintInset.setStrokeWidth(1);
-
- int textSize=25;
- Paint paintGrid = new Paint(Paint.ANTI_ALIAS_FLAG);
- paintGrid.setColor(COLOR_GRID); //gray
- paintGrid.setTextSize(textSize);
-
- Paint paintGridText = new Paint(Paint.ANTI_ALIAS_FLAG);
- paintGridText.setColor(COLOR_GRID_TEXT); //BLACKgray
- paintGridText.setTextSize(textSize);
int i;
-
int w = getWidth();
int h = getHeight();
double valueMax = 1.0;
double valueMin = -1.0;
- double valueRange = valueMax-valueMin;
+ double valueRange = valueMax - valueMin;
//print gridline time in ms/seconds, etc.
if (showGrid) {
//current number of samples in display
double samples = getWindowSamples();
- if (samples > 0.0 && mSamplingRate>0) {
- double windowMs = (1000.0*samples)/mSamplingRate;
+ if (samples > 0.0 && mSamplingRate > 0) {
+ double windowMs = (1000.0 * samples) / mSamplingRate;
//decide the best units: ms, 10ms, 100ms, 1 sec, 2 sec
- double msPerDivision = windowMs/10;
- log(" windowMS: "+ windowMs + " msPerdivision: " +msPerDivision);
-
+ double msPerDivision = windowMs / 10;
+ log(" windowMS: " + windowMs + " msPerdivision: " + msPerDivision);
int divisionInMS = 1;
//find the best level for markings:
- if (msPerDivision <= 5 ) {
+ if (msPerDivision <= 5) {
divisionInMS = 1;
- } else if (msPerDivision <15 ) {
+ } else if (msPerDivision < 15) {
divisionInMS = 10;
- } else if (msPerDivision <30 ) {
+ } else if (msPerDivision < 30) {
divisionInMS = 20;
- } else if (msPerDivision <60 ) {
+ } else if (msPerDivision < 60) {
divisionInMS = 40;
- } else if (msPerDivision <150 ) {
+ } else if (msPerDivision < 150) {
divisionInMS = 100;
- } else if (msPerDivision <400 ) {
+ } else if (msPerDivision < 400) {
divisionInMS = 200;
- } else if (msPerDivision <750 ) {
+ } else if (msPerDivision < 750) {
divisionInMS = 500;
} else {
divisionInMS = 1000;
}
-
- log(" chosen Division in MS: " +divisionInMS);
+ log(" chosen Division in MS: " + divisionInMS);
//current offset in samples
int currentOffsetSamples = getOffset();
- double currentOffsetMs = (1000.0*currentOffsetSamples)/mSamplingRate;
- int gridCount = (int) ((currentOffsetMs +divisionInMS) / divisionInMS);
+ double currentOffsetMs = (1000.0 * currentOffsetSamples) / mSamplingRate;
+ int gridCount = (int) ((currentOffsetMs + divisionInMS) / divisionInMS);
double startGridCountFrac = ((currentOffsetMs) % divisionInMS);
+ log(" gridCount:" + gridCount + " fraction: " + startGridCountFrac +
+ " firstDivision: " + gridCount * divisionInMS);
- log(" gridCount:"+gridCount +" fraction: "+startGridCountFrac +" firstDivision: " +
- gridCount * divisionInMS );
-
- double currentGridMs = divisionInMS -startGridCountFrac; //in mS
- while (currentGridMs <= windowMs ) {
-
- float newX = (float)( w * currentGridMs/windowMs );
- canvas.drawLine(newX, 0, newX, h, paintGrid);
+ double currentGridMs = divisionInMS - startGridCountFrac; //in mS
+ while (currentGridMs <= windowMs) {
+ float newX = (float) (w * currentGridMs / windowMs);
+ canvas.drawLine(newX, 0, newX, h, mPaintGrid);
double currentGridValueMS = gridCount * divisionInMS;
- String label = String.format("%.0f ms", (float)currentGridValueMS);
+ String label = String.format("%.0f ms", (float) currentGridValueMS);
//path
Path myPath = new Path();
myPath.moveTo(newX, h);
- myPath.lineTo(newX, h/2);
+ myPath.lineTo(newX, h / 2);
- canvas.drawTextOnPath(label, myPath,10,-3, paintGridText);
+ canvas.drawTextOnPath(label, myPath, 10, -3, mPaintGridText);
//advance
currentGridMs += divisionInMS;
@@ -299,116 +327,121 @@ public class WavePlotView extends View {
}
//horizontal line
- canvas.drawLine(0, h/2, w, h/2, paintGrid);
-
+ canvas.drawLine(0, h / 2, w, h / 2, mPaintGrid);
}
}
- float deltaX = (float)w/mArraySize;
+ float deltaX = (float) w / mArraySize;
//top
Path myPath = new Path();
- myPath.moveTo(0,h/2); //start
-
- for (i=0; i<mArraySize; i++) {
- double value = valuesArray[i];
- double valueScaled = (valueMax-value) / valueRange;
- float newX = i*deltaX;
- float newY = (float)((valueScaled)*h);
- myPath.lineTo(newX,newY);
- }
-
- //bottom
- for (i=mArraySize-1; i>=0; i--) {
- double value = valuesArray2[i];
- double valueScaled = (valueMax-value) / valueRange;
- float newX = i*deltaX;
- float newY = (float)((valueScaled)*h);
- myPath.lineTo(newX,newY);
- }
- //close
- myPath.close();
- canvas.drawPath(myPath, myPaint);
-
-
- if (showZoomBox) {
- float x1= (float)mSGDListener.mX1;
- float x2= (float)mSGDListener.mX2;
- canvas.drawRect(x1,0,x2,h,paintZoomBox);
- }
- if (showInset) {
- float iW = (float)(w*0.2);
- float iH = (float)(h*0.2);
- float iX = (float)(w*0.7);
- float iY = (float)(h*0.1);
- //x,y of inset
- canvas.drawRect(iX, iY, iX+iW, iY+iH,paintInsetBackground);
- canvas.drawRect(iX-1, iY-1, iX+iW+2,iY+iH+2,paintInsetBorder);
- //paintInset
- float iDeltaX = (float)iW/mInsetSize;
-
- //top
- Path iPath = new Path();
- iPath.moveTo(iX,iY +iH/2); //start
-
- for (i=0; i<mInsetSize; i++) {
- double value = insetArray[i];
- double valueScaled = (valueMax-value) / valueRange;
- float newX = iX + i*iDeltaX;
- float newY = iY + (float)((valueScaled)*iH);
- iPath.lineTo(newX,newY);
+ myPath.moveTo(0, h / 2); //start
+
+ if (mBigDataArray != null) {
+ for (i = 0; i < mArraySize; i++) {
+ double value = mValuesArray[i];
+ double valueScaled = (valueMax - value) / valueRange;
+ float newX = i * deltaX;
+ float newY = (float) (valueScaled * h);
+ myPath.lineTo(newX, newY);
}
//bottom
- for (i=mInsetSize-1; i>=0; i--) {
- double value = insetArray2[i];
- double valueScaled = (valueMax-value) / valueRange;
- float newX = iX+i*iDeltaX;
- float newY = iY+(float)((valueScaled)*iH);
- iPath.lineTo(newX,newY);
+ for (i = mArraySize - 1; i >= 0; i--) {
+ double value = mValuesArray2[i];
+ double valueScaled = (valueMax - value) / valueRange;
+ float newX = i * deltaX;
+ float newY = (float) (valueScaled * h);
+ myPath.lineTo(newX, newY);
}
//close
- iPath.close();
- canvas.drawPath(iPath, paintInset);
- if (bigDataArray != null) {
- //paint current region of zoom
- int offsetSamples = getOffset();
- double windowSamples = getWindowSamples();
- int samples = bigDataArray.length;
- if (samples > 0) {
- float x1 = (float)(iW * offsetSamples/samples);
- float x2 = (float)(iW *(offsetSamples + windowSamples)/samples);
-
- canvas.drawRect(iX+x1,iY,iX+x2,iY+iH,paintZoomBox);
+ myPath.close();
+ canvas.drawPath(myPath, mMyPaint);
+
+
+ if (showZoomBox) {
+ float x1 = (float) mSGDListener.mX1;
+ float x2 = (float) mSGDListener.mX2;
+ canvas.drawRect(x1, 0, x2, h, mPaintZoomBox);
+ }
+
+ if (showInset) {
+ float iW = (float) (w * 0.2);
+ float iH = (float) (h * 0.2);
+ float iX = (float) (w * 0.7);
+ float iY = (float) (h * 0.1);
+ //x, y of inset
+ canvas.drawRect(iX, iY, iX + iW, iY + iH, mPaintInsetBackground);
+ canvas.drawRect(iX - 1, iY - 1, iX + iW + 2, iY + iH + 2, mPaintInsetBorder);
+ //paintInset
+ float iDeltaX = (float) iW / mInsetSize;
+
+ //top
+ Path iPath = new Path();
+ iPath.moveTo(iX, iY + (iH / 2)); //start
+
+ for (i = 0; i < mInsetSize; i++) {
+ double value = mInsetArray[i];
+ double valueScaled = (valueMax - value) / valueRange;
+ float newX = iX + (i * iDeltaX);
+ float newY = iY + (float) (valueScaled * iH);
+ iPath.lineTo(newX, newY);
+ }
+
+ //bottom
+ for (i = mInsetSize - 1; i >= 0; i--) {
+ double value = mInsetArray2[i];
+ double valueScaled = (valueMax - value) / valueRange;
+ float newX = iX + i * iDeltaX;
+ float newY = iY + (float) (valueScaled * iH);
+ iPath.lineTo(newX, newY);
+ }
+
+ //close
+ iPath.close();
+ canvas.drawPath(iPath, mPaintInset);
+
+ if (mBigDataArray != null) {
+ //paint current region of zoom
+ int offsetSamples = getOffset();
+ double windowSamples = getWindowSamples();
+ int samples = mBigDataArray.length;
+
+ if (samples > 0) {
+ float x1 = (float) (iW * offsetSamples / samples);
+ float x2 = (float) (iW * (offsetSamples + windowSamples) / samples);
+
+ canvas.drawRect(iX + x1, iY, iX + x2, iY + iH, mPaintZoomBox);
+ }
}
}
}
-
}
+
void resetArray() {
- Arrays.fill(valuesArray, 0);
- Arrays.fill(valuesArray2, 0);
+ Arrays.fill(mValuesArray, 0);
+ Arrays.fill(mValuesArray2, 0);
}
+
void computeInset() {
- if (bigDataArray != null) {
- int sampleCount = bigDataArray.length;
- double pointsPerSample = (double)mInsetSize/sampleCount;
+ if (mBigDataArray != null) {
+ int sampleCount = mBigDataArray.length;
+ double pointsPerSample = (double) mInsetSize / sampleCount;
- Arrays.fill(insetArray, 0);
- Arrays.fill(insetArray2, 0);
+ Arrays.fill(mInsetArray, 0);
+ Arrays.fill(mInsetArray2, 0);
- double currentIndex =0; //points.
+ double currentIndex = 0; //points.
double max = -1.0;
double min = 1.0;
double maxAbs = 0.0;
- int index =0;
+ int index = 0;
- for (int i=0; i<sampleCount; i++) {
-
- double value = bigDataArray[i];
- if (value > max ) {
+ for (int i = 0; i < sampleCount; i++) {
+ double value = mBigDataArray[i];
+ if (value > max) {
max = value;
}
@@ -416,13 +449,11 @@ public class WavePlotView extends View {
min = value;
}
- int prevIndexInt = (int)currentIndex;
+ int prevIndexInt = (int) currentIndex;
currentIndex += pointsPerSample;
- if ((int)currentIndex > prevIndexInt) { //it switched, time to decide
-
- // log(" i="+i+" currentIndex: " + currentIndex + " max =" +max);
- insetArray[index] = max;
- insetArray2[index] = min;
+ if ((int) currentIndex > prevIndexInt) { //it switched, time to decide
+ mInsetArray[index] = max;
+ mInsetArray2[index] = min;
if (Math.abs(max) > maxAbs) maxAbs = Math.abs(max);
if (Math.abs(min) > maxAbs) maxAbs = Math.abs(min);
@@ -435,45 +466,42 @@ public class WavePlotView extends View {
if (index >= mInsetSize)
break;
}
+
//now, normalize
if (maxAbs > 0) {
- for (int i=0; i<mInsetSize; i++ ) {
- insetArray[i] /= maxAbs;
- insetArray2[i] /= maxAbs;
+ for (int i = 0; i < mInsetSize; i++) {
+ mInsetArray[i] /= maxAbs;
+ mInsetArray2[i] /= maxAbs;
}
}
+
}
}
- void computeViewArray (double zoomFactorX, int sampleOffset) {
+
+ void computeViewArray(double zoomFactorX, int sampleOffset) {
//zoom factor: how many samples per point. 1.0 = 1.0 samples per point
// sample offset in samples.
- if (zoomFactorX <1.0)
+ if (zoomFactorX < 1.0)
zoomFactorX = 1.0;
-
-
- if (bigDataArray != null) {
- int sampleCount = bigDataArray.length;
+ if (mBigDataArray != null) {
+ int sampleCount = mBigDataArray.length;
double samplesPerPoint = zoomFactorX;
- double pointsPerSample = 1.0/samplesPerPoint;
+ double pointsPerSample = 1.0 / samplesPerPoint;
-// log(" zoom: " + zoomFactorX + " sampleoffset: "+sampleOffset + " pointsPerSample: "+
-// pointsPerSample);
- //erase output array
- //Arrays.fill( buffer, (byte)0);
resetArray();
- double currentIndex =0; //points.
+ double currentIndex = 0; //points.
double max = -1.0;
double min = 1.0;
- int index =0;
+ int index = 0;
- for (int i=sampleOffset; i<sampleCount; i++) {
+ for (int i = sampleOffset; i < sampleCount; i++) {
- double value = bigDataArray[i];
- if (value > max ) {
+ double value = mBigDataArray[i];
+ if (value > max) {
max = value;
}
@@ -481,13 +509,11 @@ public class WavePlotView extends View {
min = value;
}
- int prevIndexInt = (int)currentIndex;
+ int prevIndexInt = (int) currentIndex;
currentIndex += pointsPerSample;
- if ((int)currentIndex > prevIndexInt) { //it switched, time to decide
-
- // log(" i="+i+" currentIndex: " + currentIndex + " max =" +max);
- valuesArray[index] = max;
- valuesArray2[index] = min;
+ if ((int) currentIndex > prevIndexInt) { //it switched, time to decide
+ mValuesArray[index] = max;
+ mValuesArray2[index] = min;
max = -1.0;
min = 1.0;
@@ -497,147 +523,148 @@ public class WavePlotView extends View {
if (index >= mArraySize)
break;
}
-// log(" array values pushed: "+count);
- }//big data array not null
+ } //big data array not null
redraw();
}
+
void setData(double [] dataVector) {
- bigDataArray = dataVector;
+ mBigDataArray = dataVector;
double maxZoom = getMaxZoomOut();
setZoom(maxZoom);
- setOffset(0,false);
+ setOffset(0, false);
computeInset();
refreshGraph();
}
+
void redraw() {
invalidate();
}
+
@Override
public boolean onTouchEvent(MotionEvent event) {
mDetector.onTouchEvent(event);
mSGDetector.onTouchEvent(event);
- // return super.onTouchEvent(event);
+ //return super.onTouchEvent(event);
return true;
}
+
class MyGestureListener extends GestureDetector.SimpleOnGestureListener {
- private static final String DEBUG_TAG = "WavePlotGesture";
+ private static final String DEBUG_TAG = "MyGestureListener";
+
@Override
public boolean onDown(MotionEvent event) {
- Log.d(DEBUG_TAG,"onDown: " + event.toString() + " " + TAG);
+ Log.d(DEBUG_TAG, "onDown: " + event.toString() + " " + TAG);
return true;
}
+
@Override
public boolean onFling(MotionEvent event1, MotionEvent event2,
- float velocityX, float velocityY) {
- //Log.d(DEBUG_TAG, "onFling: " + event1.toString()+event2.toString());
- Log.d(DEBUG_TAG, "onFling: VelocityX: " +velocityX +" velocityY: "+velocityY);
+ float velocityX, float velocityY) {
+ Log.d(DEBUG_TAG, "onFling: VelocityX: " + velocityX + " velocityY: " + velocityY);
//velocityX positive left to right
// negative: right to left
//double offset = getZoom()
double samplesPerWindow = mArraySize * getZoom();
-
int maxPixelsPerWindow = 8000;
-
- double offsetFactor = -(double)(velocityX/maxPixelsPerWindow);
-
- double offset = (double)(samplesPerWindow *offsetFactor/3.0);
-
+ double offsetFactor = -(double) (velocityX / maxPixelsPerWindow);
+ double offset = (samplesPerWindow * offsetFactor / 3.0);
Log.d(DEBUG_TAG, " VELOCITY: " + velocityX + " samples/window = " + samplesPerWindow +
" offsetFactor = " + offsetFactor + " offset: " + offset);
- setOffset((int)offset,true);
+
+ setOffset((int) offset, true);
refreshGraph();
return true;
}
+
@Override
public boolean onDoubleTap(MotionEvent event) {
- Log.d(DEBUG_TAG,"onDoubleTap: " + event.toString());
+ Log.d(DEBUG_TAG, "onDoubleTap: " + event.toString());
setZoom(100000);
- setOffset(0,false);
+ setOffset(0, false);
refreshGraph();
return true;
}
-
}
- private class MyScaleGestureListener extends ScaleGestureDetector.SimpleOnScaleGestureListener {
- private static final String DEBUG_TAG = "WavePlotScaleGesture";
+ private class MyScaleGestureListener extends ScaleGestureDetector.SimpleOnScaleGestureListener {
+ private static final String DEBUG_TAG = "MyScaleGestureListener";
public boolean mIsScaling = false;
public double mX1 = 0;
public double mX2 = 0;
+
@Override
public boolean onScaleBegin(ScaleGestureDetector detector) {
mIsScaling = true;
return super.onScaleBegin(detector);
}
+
@Override
- public void onScaleEnd (ScaleGestureDetector detector) {
+ public void onScaleEnd(ScaleGestureDetector detector) {
mIsScaling = false;
//now zoom
{
int w = getWidth();
- int h = getHeight();
+ //int h = getHeight();
- double currentSpan = detector.getCurrentSpan();
+ //double currentSpan = detector.getCurrentSpan();
double currentSpanX = detector.getCurrentSpanX();
- double currentSpanY = detector.getCurrentSpanY();
+ //double currentSpanY = detector.getCurrentSpanY();
double focusX = detector.getFocusX();
- double focusY = detector.getFocusY();
- double scaleFactor = detector.getScaleFactor();
+ //double focusY = detector.getFocusY();
+ //double scaleFactor = detector.getScaleFactor();
//estimated X1, X2
- double x1 = focusX - currentSpanX/2;
- double x2 = focusX + currentSpanX/2;
+ double x1 = focusX - (currentSpanX / 2);
+ double x2 = focusX + (currentSpanX / 2);
+ //double x1clip = x1 < 0 ? 0 : (x1 > w ? w : x1);
+ //double x2clip = x2 < 0 ? 0 : (x2 > w ? w : x2);
- double x1clip = x1 <0 ? 0 : (x1>w ? w : x1);
- double x2clip = x2 <0 ? 0 : (x2>w ? w : x2);
-//
-// mX1 = x1clip;
-// mX2 = x2clip;
-
- int originalOffset = getOffset();
+ //int originalOffset = getOffset();
double windowSamplesOriginal = getWindowSamples(); //samples in current window
double currentZoom = getZoom();
- double windowFactor = Math.abs(mX2-mX1)/w;
+ double windowFactor = Math.abs(mX2 - mX1) / w;
- double newZoom = currentZoom * windowFactor;
- setZoom(newZoom);
- int newOffset = (int)(windowSamplesOriginal * mX1/w);
- setOffset(newOffset,true); //relative
+ double newZoom = currentZoom * windowFactor;
+ setZoom(newZoom);
+ int newOffset = (int) (windowSamplesOriginal * mX1 / w);
+ setOffset(newOffset, true); //relative
}
refreshGraph();
}
+
@Override
public boolean onScale(ScaleGestureDetector detector) {
int w = getWidth();
- int h = getHeight();
- double currentSpan = detector.getCurrentSpan();
+ //int h = getHeight();
+ //double currentSpan = detector.getCurrentSpan();
double currentSpanX = detector.getCurrentSpanX();
- double currentSpanY = detector.getCurrentSpanY();
+ //double currentSpanY = detector.getCurrentSpanY();
double focusX = detector.getFocusX();
- double focusY = detector.getFocusY();
- double scaleFactor = detector.getScaleFactor();
+ //double focusY = detector.getFocusY();
+ //double scaleFactor = detector.getScaleFactor();
+
//estimated X1, X2
- double x1 = focusX - currentSpanX/2;
- double x2 = focusX + currentSpanX/2;
- double x1clip = x1 <0 ? 0 : (x1>w ? w : x1);
- double x2clip = x2 <0 ? 0 : (x2>w ? w : x2);
+ double x1 = focusX - (currentSpanX / 2);
+ double x2 = focusX + (currentSpanX / 2);
+ double x1clip = x1 < 0 ? 0 : (x1 > w ? w : x1);
+ double x2clip = x2 < 0 ? 0 : (x2 > w ? w : x2);
mX1 = x1clip;
mX2 = x2clip;
refreshGraph();
@@ -647,7 +674,7 @@ public class WavePlotView extends View {
private static void log(String msg) {
- Log.v("WavePlot", msg);
+ Log.v(TAG, msg);
}
}
diff --git a/LoopbackApp/app/src/main/jni/audio_utils/atomic.c b/LoopbackApp/app/src/main/jni/audio_utils/atomic.c
index db2b3fc..a22ed15 100644
--- a/LoopbackApp/app/src/main/jni/audio_utils/atomic.c
+++ b/LoopbackApp/app/src/main/jni/audio_utils/atomic.c
@@ -18,14 +18,12 @@
#include <stdatomic.h>
-int32_t android_atomic_acquire_load(volatile const int32_t* addr)
-{
- volatile atomic_int_least32_t* a = (volatile atomic_int_least32_t*)addr;
+int32_t android_atomic_acquire_load(volatile const int32_t* addr) {
+ volatile atomic_int_least32_t* a = (volatile atomic_int_least32_t*) addr;
return atomic_load_explicit(a, memory_order_acquire);
}
-void android_atomic_release_store(int32_t value, volatile int32_t* addr)
-{
- volatile atomic_int_least32_t* a = (volatile atomic_int_least32_t*)addr;
+void android_atomic_release_store(int32_t value, volatile int32_t* addr) {
+ volatile atomic_int_least32_t* a = (volatile atomic_int_least32_t*) addr;
atomic_store_explicit(a, value, memory_order_release);
}
diff --git a/LoopbackApp/app/src/main/jni/audio_utils/fifo.c b/LoopbackApp/app/src/main/jni/audio_utils/fifo.c
index ea9a8d1..2f00a7f 100644
--- a/LoopbackApp/app/src/main/jni/audio_utils/fifo.c
+++ b/LoopbackApp/app/src/main/jni/audio_utils/fifo.c
@@ -25,9 +25,9 @@
//#include <cutils/log.h>
#define ALOG_ASSERT(exp)
+
void audio_utils_fifo_init(struct audio_utils_fifo *fifo, size_t frameCount, size_t frameSize,
- void *buffer)
-{
+ void *buffer) {
// We would need a 64-bit roundup to support larger frameCount.
ALOG_ASSERT(fifo != NULL && frameCount > 0 && frameSize > 0 && buffer != NULL);
fifo->mFrameCount = frameCount;
@@ -39,14 +39,15 @@ void audio_utils_fifo_init(struct audio_utils_fifo *fifo, size_t frameCount, siz
fifo->mRear = 0;
}
+
void audio_utils_fifo_deinit(struct audio_utils_fifo *fifo __unused)
{
}
+
// Return a new index as the sum of an old index (either mFront or mRear) and a specified increment.
static inline int32_t audio_utils_fifo_sum(struct audio_utils_fifo *fifo, int32_t index,
- uint32_t increment)
-{
+ uint32_t increment) {
if (fifo->mFudgeFactor) {
uint32_t mask = fifo->mFrameCountP2 - 1;
ALOG_ASSERT((index & mask) < fifo->mFrameCount);
@@ -54,6 +55,7 @@ static inline int32_t audio_utils_fifo_sum(struct audio_utils_fifo *fifo, int32_
if ((index & mask) + increment >= fifo->mFrameCount) {
increment += fifo->mFudgeFactor;
}
+
index += increment;
ALOG_ASSERT((index & mask) < fifo->mFrameCount);
return index;
@@ -62,74 +64,84 @@ static inline int32_t audio_utils_fifo_sum(struct audio_utils_fifo *fifo, int32_
}
}
+
// Return the difference between two indices: rear - front, where 0 <= difference <= mFrameCount.
static inline size_t audio_utils_fifo_diff(struct audio_utils_fifo *fifo, int32_t rear,
- int32_t front)
-{
+ int32_t front) {
int32_t diff = rear - front;
+
if (fifo->mFudgeFactor) {
uint32_t mask = ~(fifo->mFrameCountP2 - 1);
int32_t genDiff = (rear & mask) - (front & mask);
+
if (genDiff != 0) {
ALOG_ASSERT(genDiff == (int32_t) fifo->mFrameCountP2);
diff -= fifo->mFudgeFactor;
}
}
+
// FIFO should not be overfull
ALOG_ASSERT(0 <= diff && diff <= (int32_t) fifo->mFrameCount);
return (size_t) diff;
}
-ssize_t audio_utils_fifo_write(struct audio_utils_fifo *fifo, const void *buffer, size_t count)
-{
+
+ssize_t audio_utils_fifo_write(struct audio_utils_fifo *fifo, const void *buffer, size_t count) {
int32_t front = android_atomic_acquire_load(&fifo->mFront);
int32_t rear = fifo->mRear;
size_t availToWrite = fifo->mFrameCount - audio_utils_fifo_diff(fifo, rear, front);
+
if (availToWrite > count) {
availToWrite = count;
}
+
rear &= fifo->mFrameCountP2 - 1;
size_t part1 = fifo->mFrameCount - rear;
if (part1 > availToWrite) {
part1 = availToWrite;
}
+
if (part1 > 0) {
memcpy((char *) fifo->mBuffer + (rear * fifo->mFrameSize), buffer,
part1 * fifo->mFrameSize);
size_t part2 = availToWrite - part1;
+
if (part2 > 0) {
memcpy(fifo->mBuffer, (char *) buffer + (part1 * fifo->mFrameSize),
part2 * fifo->mFrameSize);
}
+
android_atomic_release_store(audio_utils_fifo_sum(fifo, fifo->mRear, availToWrite),
&fifo->mRear);
}
return availToWrite;
}
-ssize_t audio_utils_fifo_read(struct audio_utils_fifo *fifo, void *buffer, size_t count)
-{
+
+ssize_t audio_utils_fifo_read(struct audio_utils_fifo *fifo, void *buffer, size_t count) {
int32_t rear = android_atomic_acquire_load(&fifo->mRear);
int32_t front = fifo->mFront;
size_t availToRead = audio_utils_fifo_diff(fifo, rear, front);
if (availToRead > count) {
availToRead = count;
}
+
front &= fifo->mFrameCountP2 - 1;
size_t part1 = fifo->mFrameCount - front;
if (part1 > availToRead) {
part1 = availToRead;
}
+
if (part1 > 0) {
memcpy(buffer, (char *) fifo->mBuffer + (front * fifo->mFrameSize),
- part1 * fifo->mFrameSize);
+ part1 * fifo->mFrameSize);
size_t part2 = availToRead - part1;
if (part2 > 0) {
memcpy((char *) buffer + (part1 * fifo->mFrameSize), fifo->mBuffer,
- part2 * fifo->mFrameSize);
+ part2 * fifo->mFrameSize);
}
android_atomic_release_store(audio_utils_fifo_sum(fifo, fifo->mFront, availToRead),
- &fifo->mFront);
+ &fifo->mFront);
}
return availToRead;
}
diff --git a/LoopbackApp/app/src/main/jni/audio_utils/roundup.c b/LoopbackApp/app/src/main/jni/audio_utils/roundup.c
index 4f9af6a..6c8e504 100644
--- a/LoopbackApp/app/src/main/jni/audio_utils/roundup.c
+++ b/LoopbackApp/app/src/main/jni/audio_utils/roundup.c
@@ -16,12 +16,12 @@
#include "roundup.h"
-unsigned roundup(unsigned v)
-{
+unsigned roundup(unsigned v) {
// __builtin_clz is undefined for zero input
if (v == 0) {
v = 1;
}
+
int lz = __builtin_clz((int) v);
unsigned rounded = ((unsigned) 0x80000000) >> lz;
// 0x800000001 and higher are actually rounded _down_ to prevent overflow
diff --git a/LoopbackApp/app/src/main/jni/jni_sles.c b/LoopbackApp/app/src/main/jni/jni_sles.c
index f2a8877..5a05d5c 100644
--- a/LoopbackApp/app/src/main/jni/jni_sles.c
+++ b/LoopbackApp/app/src/main/jni/jni_sles.c
@@ -20,48 +20,91 @@
#include <stdio.h>
#include <stddef.h>
-/////
+
JNIEXPORT jlong JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesInit
- (JNIEnv *env __unused, jobject obj __unused, jint samplingRate, jint frameCount, jint micSource) {
+ (JNIEnv *env __unused, jobject obj __unused, jint samplingRate, jint frameCount, jint micSource,
+ jint testType, jdouble frequency1, jobject byteBuffer) {
sles_data * pSles = NULL;
- if( slesInit(&pSles, samplingRate, frameCount, micSource) != SLES_FAIL ) {
+ char* byteBufferPtr = (*env)->GetDirectBufferAddress(env, byteBuffer);
+ int byteBufferLength = (*env)->GetDirectBufferCapacity(env, byteBuffer);
- return (long)pSles;
+ if (slesInit(&pSles, samplingRate, frameCount, micSource,
+ testType, frequency1, byteBufferPtr, byteBufferLength) != SLES_FAIL) {
+ return (long) pSles;
}
+
// FIXME This should be stored as a (long) field in the object,
- // so that incorrect Java code could not synthesize a bad sles pointer.
+ // so that incorrect Java code could not synthesize a bad sles pointer.
return 0;
}
+
JNIEXPORT jint JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesProcessNext
(JNIEnv *env __unused, jobject obj __unused, jlong sles, jdoubleArray samplesArray, jlong offset) {
- sles_data * pSles= (sles_data*) sles;
+ sles_data * pSles = (sles_data*) (size_t) sles;
long maxSamples = (*env)->GetArrayLength(env, samplesArray);
- double *pSamples = (*env)->GetDoubleArrayElements(env, samplesArray,0);
+ double *pSamples = (*env)->GetDoubleArrayElements(env, samplesArray, 0);
long availableSamples = maxSamples-offset;
double *pCurrentSample = pSamples+offset;
+ SLES_PRINTF("jni slesProcessNext pSles:%p, currentSample %p, availableSamples %d ",
+ pSles, pCurrentSample, availableSamples);
+
+ int samplesRead = slesProcessNext(pSles, pCurrentSample, availableSamples);
+ return samplesRead;
+}
- //int samplesRead = slesProcessNext(pSles, pSamples, maxSamples);
+JNIEXPORT jint JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesDestroy
+ (JNIEnv *env __unused, jobject obj __unused, jlong sles) {
+ sles_data * pSles = (sles_data*) (size_t) sles;
+ int status = slesDestroy(&pSles);
+ return status;
+}
- SLES_PRINTF("jni slesProcessNext pSles:%p, currentSample %p, availableSamples %d ", pSles, pCurrentSample, availableSamples);
+JNIEXPORT jintArray JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesGetRecorderBufferPeriod
+ (JNIEnv *env, jobject obj, jlong sles) {
+ sles_data * pSles = (sles_data*) (size_t) sles;
+ int* recorderBufferPeriod = slesGetRecorderBufferPeriod(pSles);
- int samplesRead = slesProcessNext(pSles, pCurrentSample, availableSamples);
+ // get the length = RANGE
+ jintArray result = (*env)->NewIntArray(env, RANGE);
+ (*env)->SetIntArrayRegion(env, result, 0, RANGE, recorderBufferPeriod);
- return samplesRead;
+ return result;
}
-JNIEXPORT jint JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesDestroy
+
+JNIEXPORT jint JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesGetRecorderMaxBufferPeriod
(JNIEnv *env __unused, jobject obj __unused, jlong sles) {
- sles_data * pSles= (sles_data*) sles;
+ sles_data * pSles = (sles_data*) (size_t) sles;
+ int* recorderMaxBufferPeriod = slesGetRecorderMaxBufferPeriod(pSles);
- int status = slesDestroy(&pSles);
+ return recorderMaxBufferPeriod;
+}
- return status;
+
+JNIEXPORT jintArray JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesGetPlayerBufferPeriod
+ (JNIEnv *env __unused, jobject obj __unused, jlong sles) {
+ sles_data * pSles = (sles_data*) (size_t) sles;
+ int* playerBufferPeriod = slesGetPlayerBufferPeriod(pSles);
+
+ jintArray result = (*env)->NewIntArray(env, RANGE);
+ (*env)->SetIntArrayRegion(env, result, 0, RANGE, playerBufferPeriod);
+
+ return result;
+}
+
+
+JNIEXPORT jint JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesGetPlayerMaxBufferPeriod
+ (JNIEnv *env __unused, jobject obj __unused, jlong sles) {
+ sles_data * pSles = (sles_data*) (size_t) sles;
+ int playerMaxBufferPeriod = slesGetPlayerMaxBufferPeriod(pSles);
+
+ return playerMaxBufferPeriod;
}
diff --git a/LoopbackApp/app/src/main/jni/jni_sles.h b/LoopbackApp/app/src/main/jni/jni_sles.h
index bc3b371..33c792c 100644
--- a/LoopbackApp/app/src/main/jni/jni_sles.h
+++ b/LoopbackApp/app/src/main/jni/jni_sles.h
@@ -26,14 +26,25 @@ extern "C" {
////////////////////////
////SLE
JNIEXPORT jlong JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesInit
- (JNIEnv *, jobject, jint, jint, jint );
+ (JNIEnv *, jobject, jint, jint, jint, jint, jdouble, jobject byteBuffer);
JNIEXPORT jint JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesProcessNext
- (JNIEnv *, jobject , jlong, jdoubleArray, jlong );
+ (JNIEnv *, jobject, jlong, jdoubleArray, jlong);
JNIEXPORT jint JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesDestroy
- (JNIEnv *, jobject , jlong );
+ (JNIEnv *, jobject, jlong);
+JNIEXPORT jintArray JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesGetRecorderBufferPeriod
+ (JNIEnv *, jobject, jlong);
+
+JNIEXPORT jint JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesGetRecorderMaxBufferPeriod
+ (JNIEnv *, jobject, jlong);
+
+JNIEXPORT jintArray JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesGetPlayerBufferPeriod
+ (JNIEnv *, jobject, jlong);
+
+JNIEXPORT jint JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesGetPlayerMaxBufferPeriod
+ (JNIEnv *, jobject, jlong);
#ifdef __cplusplus
}
diff --git a/LoopbackApp/app/src/main/jni/sles.cpp b/LoopbackApp/app/src/main/jni/sles.cpp
index 98245e0..4e8f358 100644
--- a/LoopbackApp/app/src/main/jni/sles.cpp
+++ b/LoopbackApp/app/src/main/jni/sles.cpp
@@ -24,6 +24,9 @@
// It will generate feedback (Larsen effect) if played through on-device speakers,
// or acts as a delay if played through headset.
+#define _USE_MATH_DEFINES
+#include <cmath>
+
#include "sles.h"
#include <stdio.h>
#include <stdlib.h>
@@ -35,24 +38,31 @@
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
+//#include <jni.h>
+#include <time.h>
-int slesInit(sles_data ** ppSles, int samplingRate, int frameCount, int micSource) {
+int slesInit(sles_data ** ppSles, int samplingRate, int frameCount, int micSource,
+ int testType, double frequency1, char* byteBufferPtr, int byteBufferLength) {
int status = SLES_FAIL;
if (ppSles != NULL) {
- sles_data * pSles = (sles_data*)malloc( sizeof (sles_data));
+ sles_data * pSles = (sles_data*) malloc(sizeof(sles_data));
- memset(pSles,0,sizeof(sles_data));
+ memset(pSles, 0, sizeof(sles_data));
- SLES_PRINTF("malloc %d bytes at %p",sizeof(sles_data), pSles);
- //__android_log_print(ANDROID_LOG_INFO, "sles_jni", "malloc %d bytes at %p",sizeof(sles_data), pSles);//Or ANDROID_LOG_INFO, ...
+ SLES_PRINTF("malloc %d bytes at %p", sizeof(sles_data), pSles);
+ //__android_log_print(ANDROID_LOG_INFO, "sles_jni",
+ //"malloc %d bytes at %p", sizeof(sles_data), pSles);//Or ANDROID_LOG_INFO, ...
*ppSles = pSles;
if (pSles != NULL)
{
- SLES_PRINTF("creating server. Sampling rate =%d, frame count = %d",samplingRate, frameCount);
- status = slesCreateServer(pSles, samplingRate, frameCount, micSource);
- SLES_PRINTF("slesCreateServer =%d",status);
+ SLES_PRINTF("creating server. Sampling rate =%d, frame count = %d",
+ samplingRate, frameCount);
+ status = slesCreateServer(pSles, samplingRate, frameCount, micSource,
+ testType, frequency1, byteBufferPtr, byteBufferLength);
+ SLES_PRINTF("slesCreateServer =%d", status);
}
}
+
return status;
}
int slesDestroy(sles_data ** ppSles) {
@@ -60,7 +70,7 @@ int slesDestroy(sles_data ** ppSles) {
if (ppSles != NULL) {
slesDestroyServer(*ppSles);
- if(*ppSles != NULL)
+ if (*ppSles != NULL)
{
free(*ppSles);
*ppSles = 0;
@@ -74,53 +84,70 @@ int slesDestroy(sles_data ** ppSles) {
(unsigned) (x), (unsigned) (y)); assert((x) == (y)); } } while (0)
-// Called after audio recorder fills a buffer with data
-static void recorderCallback(SLAndroidSimpleBufferQueueItf caller __unused, void *context ) {
+// Called after audio recorder fills a buffer with data, then we can read from this filled buffer
+static void recorderCallback(SLAndroidSimpleBufferQueueItf caller __unused, void *context) {
sles_data *pSles = (sles_data*) context;
if (pSles != NULL) {
+ collectRecorderBufferPeriod(pSles);
-
-
+ //__android_log_print(ANDROID_LOG_INFO, "sles_jni", "in the recordercallback");
SLresult result;
pthread_mutex_lock(&(pSles->mutex));
- //ee SLES_PRINTF("<R");
+ //ee SLES_PRINTF("<R");
// We should only be called when a recording buffer is done
assert(pSles->rxFront <= pSles->rxBufCount);
assert(pSles->rxRear <= pSles->rxBufCount);
assert(pSles->rxFront != pSles->rxRear);
- char *buffer = pSles->rxBuffers[pSles->rxFront];
+ char *buffer = pSles->rxBuffers[pSles->rxFront]; //pSles->rxBuffers stores the data recorded
+
// Remove buffer from record queue
if (++pSles->rxFront > pSles->rxBufCount) {
pSles->rxFront = 0;
}
- ssize_t actual = audio_utils_fifo_write(&(pSles->fifo), buffer,
- (size_t) pSles->bufSizeInFrames);
- if (actual != (ssize_t) pSles->bufSizeInFrames) {
- write(1, "?", 1);
- }
-
- // This is called by a realtime (SCHED_FIFO) thread,
- // and it is unsafe to do I/O as it could block for unbounded time.
- // Flash filesystem is especially notorious for blocking.
- if (pSles->fifo2Buffer != NULL) {
- actual = audio_utils_fifo_write(&(pSles->fifo2), buffer,
+ if (pSles->testType == TEST_TYPE_LATENCY) {
+ ssize_t actual = audio_utils_fifo_write(&(pSles->fifo), buffer,
(size_t) pSles->bufSizeInFrames);
+
if (actual != (ssize_t) pSles->bufSizeInFrames) {
write(1, "?", 1);
}
+
+ // This is called by a realtime (SCHED_FIFO) thread,
+ // and it is unsafe to do I/O as it could block for unbounded time.
+ // Flash filesystem is especially notorious for blocking.
+ if (pSles->fifo2Buffer != NULL) {
+ actual = audio_utils_fifo_write(&(pSles->fifo2), buffer,
+ (size_t) pSles->bufSizeInFrames);
+ if (actual != (ssize_t) pSles->bufSizeInFrames) {
+ write(1, "?", 1);
+ }
+ }
+ } else if (pSles->testType == TEST_TYPE_BUFFER_PERIOD) {
+ if (pSles->fifo2Buffer != NULL) {
+ ssize_t actual = byteBuffer_write(pSles, buffer, (size_t) pSles->bufSizeInFrames);
+
+ //FIXME should log errors using other methods instead of printing to terminal
+ if (actual != (ssize_t) pSles->bufSizeInFrames) {
+ write(1, "?", 1);
+ }
+ }
}
+
// Enqueue this same buffer for the recorder to fill again.
result = (*(pSles->recorderBufferQueue))->Enqueue(pSles->recorderBufferQueue, buffer,
- pSles->bufSizeInBytes);
+ pSles->bufSizeInBytes);
+ //__android_log_print(ANDROID_LOG_INFO, "recorderCallback", "recorder buffer size: %i",
+ // pSles->bufSizeInBytes);
ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
// Update our model of the record queue
- SLuint32 rxRearNext = pSles->rxRear+1;
+ SLuint32 rxRearNext = pSles->rxRear + 1;
if (rxRearNext > pSles->rxBufCount) {
rxRearNext = 0;
}
@@ -137,15 +164,92 @@ static void recorderCallback(SLAndroidSimpleBufferQueueItf caller __unused, void
}
+// Write "count" amount of short from buffer to pSles->byteBufferPtr. This byteBuffer will read by
+// java code.
+ssize_t byteBuffer_write(sles_data *pSles, char *buffer, size_t count) {
+ // bytebufferSize is in byte
+ int32_t rear; // rear should not exceed 2^31 - 1, or else overflow will happen
+ memcpy(&rear, (char *) (pSles->byteBufferPtr + pSles->byteBufferLength - 4), sizeof(rear));
+
+ size_t frameSize = pSles->channels * sizeof(short); // only one channel
+ int32_t maxLengthInShort = (pSles->byteBufferLength - 4) / frameSize;
+ // mask the upper bits to get the correct position in the pipe
+ int32_t tempRear = rear & (maxLengthInShort - 1);
+ size_t part1 = maxLengthInShort - tempRear;
+
+ if (part1 > count) {
+ part1 = count;
+ }
+
+ if (part1 > 0) {
+ memcpy(pSles->byteBufferPtr + (tempRear * frameSize), buffer,
+ part1 * frameSize);
+
+ size_t part2 = count - part1;
+ if (part2 > 0) {
+ memcpy(pSles->byteBufferPtr, (buffer + (part1 * frameSize)),
+ part2 * frameSize);
+ }
+
+ //TODO do we need something similar to the below function call?
+ //android_atomic_release_store(audio_utils_fifo_sum(fifo, fifo->mRear, availToWrite),
+ // &fifo->mRear);
+ }
+
+ // increase value of rear
+ int32_t* rear2 = (int32_t *) (pSles->byteBufferPtr + pSles->byteBufferLength - 4);
+ *rear2 += count;
+ return count;
+}
+
+
+// Called in the beginning of recorderCallback() to collect the interval between each
+// recorderCallback().
+void collectRecorderBufferPeriod(sles_data *pSles) {
+ struct timespec recorder_time;
+ clock_gettime(CLOCK_MONOTONIC, &recorder_time);
+
+ pSles->recorder_current_time_sec = recorder_time.tv_sec;
+ pSles->recorder_current_time_nsec = recorder_time.tv_nsec;
+ (pSles->recorder_buffer_count)++;
+
+ if (pSles->recorder_previous_time_sec != 0 &&
+ pSles->recorder_buffer_count > BUFFER_PERIOD_DISCARD){
+ int diff_in_second = pSles->recorder_current_time_sec - pSles->recorder_previous_time_sec;
+ long diff_in_nano = pSles->recorder_current_time_nsec - pSles->recorder_previous_time_nsec;
+
+ // diff_in_milli is rounded up
+ long long total_diff_in_nano = (diff_in_second * NANOS_PER_SECOND) + diff_in_nano;
+ int diff_in_milli = (int) ((total_diff_in_nano + NANOS_PER_MILLI - 1) / NANOS_PER_MILLI);
+
+ if (diff_in_milli > pSles->recorder_max_buffer_period) {
+ pSles->recorder_max_buffer_period = diff_in_milli;
+ }
+
+ // from 0 ms to 1000 ms, plus a sum of all instances > 1000ms
+ if (diff_in_milli >= (RANGE - 1)) {
+ (pSles->recorder_buffer_period)[RANGE-1]++;
+ } else if (diff_in_milli >= 0) {
+ (pSles->recorder_buffer_period)[diff_in_milli]++;
+ } else { // for diff_in_milli < 0
+ __android_log_print(ANDROID_LOG_INFO, "sles_recorder", "Having negative BufferPeriod.");
+ }
+ }
+
+ pSles->recorder_previous_time_sec = pSles->recorder_current_time_sec;
+ pSles->recorder_previous_time_nsec = pSles->recorder_current_time_nsec;
+}
+
+
// Called after audio player empties a buffer of data
static void playerCallback(SLBufferQueueItf caller __unused, void *context) {
sles_data *pSles = (sles_data*) context;
if (pSles != NULL) {
-
+ collectPlayerBufferPeriod(pSles);
SLresult result;
pthread_mutex_lock(&(pSles->mutex));
- //ee SLES_PRINTF("<P");
+ //ee SLES_PRINTF("<P");
// Get the buffer that just finished playing
assert(pSles->txFront <= pSles->txBufCount);
@@ -156,38 +260,63 @@ static void playerCallback(SLBufferQueueItf caller __unused, void *context) {
pSles->txFront = 0;
}
+ if (pSles->testType == TEST_TYPE_LATENCY) {
+ ssize_t actual = audio_utils_fifo_read(&(pSles->fifo), buffer, pSles->bufSizeInFrames);
+ if (actual != (ssize_t) pSles->bufSizeInFrames) {
+ write(1, "/", 1);
+ // on underrun from pipe, substitute silence
+ memset(buffer, 0, pSles->bufSizeInFrames * pSles->channels * sizeof(short));
+ }
- ssize_t actual = audio_utils_fifo_read(&(pSles->fifo), buffer, pSles->bufSizeInFrames);
- if (actual != (ssize_t) pSles->bufSizeInFrames) {
- write(1, "/", 1);
- // on underrun from pipe, substitute silence
- memset(buffer, 0, pSles->bufSizeInFrames * pSles->channels * sizeof(short));
- }
-
- if (pSles->injectImpulse == -1) {
- // Experimentally, a single frame impulse was insufficient to trigger feedback.
- // Also a Nyquist frequency signal was also insufficient, probably because
- // the response of output and/or input path was not adequate at high frequencies.
- // This short burst of a few cycles of square wave at Nyquist/4 was found to work well.
- for (unsigned i = 0; i < pSles->bufSizeInFrames / 8; i += 8) {
- for (int j = 0; j < 8; j++) {
- for (unsigned k = 0; k < pSles->channels; k++) {
- ((short *)buffer)[(i+j)*pSles->channels+k] = j < 4 ? 0x7FFF : 0x8000;
+ if (pSles->injectImpulse == -1) { // here we inject pulse
+ // Experimentally, a single frame impulse was insufficient to trigger feedback.
+ // Also a Nyquist frequency signal was also insufficient, probably because
+ // the response of output and/or input path was not adequate at high frequencies.
+ // This short burst of a few cycles of square wave at Nyquist/4 found to work well.
+ for (unsigned i = 0; i < pSles->bufSizeInFrames / 8; i += 8) {
+ for (int j = 0; j < 8; j++) {
+ for (unsigned k = 0; k < pSles->channels; k++) {
+ ((short *) buffer)[(i + j) * pSles->channels + k] =
+ j < 4 ? 0x7FFF : 0x8000;
+ }
}
}
+ pSles->injectImpulse = 0;
+ }
+ } else if (pSles->testType == TEST_TYPE_BUFFER_PERIOD) {
+ double twoPi = M_PI * 2;
+ int maxShort = 32767;
+ float amplitude = 0.8;
+ short value;
+ double phaseIncrement = pSles->frequency1 / pSles->sampleRate;
+ bool isGlitchEnabled = false;
+ for (unsigned i = 0; i < pSles->bufSizeInFrames; i++) {
+ value = (short) (sin(pSles->bufferTestPhase1) * maxShort * amplitude);
+ ((short *) buffer)[i] = value;
+
+ pSles->bufferTestPhase1 += twoPi * phaseIncrement;
+ // insert glitches if isGlitchEnabled == true, and insert it for every second
+ if (isGlitchEnabled && (pSles->count % pSles->sampleRate == 0)) {
+ pSles->bufferTestPhase1 += twoPi * phaseIncrement;
+ }
+
+ pSles->count++;
+
+ while (pSles->bufferTestPhase1 > twoPi) {
+ pSles->bufferTestPhase1 -= twoPi;
+ }
}
- pSles->injectImpulse = 0;
}
// Enqueue the filled buffer for playing
result = (*(pSles->playerBufferQueue))->Enqueue(pSles->playerBufferQueue, buffer,
- pSles->bufSizeInBytes);
+ pSles->bufSizeInBytes);
ASSERT_EQ(SL_RESULT_SUCCESS, result);
// Update our model of the player queue
assert(pSles->txFront <= pSles->txBufCount);
assert(pSles->txRear <= pSles->txBufCount);
- SLuint32 txRearNext = pSles->txRear+1;
+ SLuint32 txRearNext = pSles->txRear + 1;
if (txRearNext > pSles->txBufCount) {
txRearNext = 0;
}
@@ -195,14 +324,50 @@ static void playerCallback(SLBufferQueueItf caller __unused, void *context) {
pSles->txBuffers[pSles->txRear] = buffer;
pSles->txRear = txRearNext;
-
- //ee SLES_PRINTF("p>");
pthread_mutex_unlock(&(pSles->mutex));
-
} //pSles not null
}
-int slesCreateServer(sles_data *pSles, int samplingRate, int frameCount, int micSource) {
+// Called in the beginning of playerCallback() to collect the interval between each
+// playerCallback().
+void collectPlayerBufferPeriod(sles_data *pSles) {
+ struct timespec player_time;
+ clock_gettime(CLOCK_MONOTONIC, &player_time);
+
+ pSles->player_current_time_sec = player_time.tv_sec;
+ pSles->player_current_time_nsec = player_time.tv_nsec;
+ (pSles->player_buffer_count)++;
+
+ if (pSles->player_previous_time_sec != 0 &&
+ pSles->player_buffer_count > BUFFER_PERIOD_DISCARD) {
+ int diff_in_second = pSles->player_current_time_sec - pSles->player_previous_time_sec;
+ long diff_in_nano = pSles->player_current_time_nsec - pSles->player_previous_time_nsec;
+
+ // diff_in_milli is rounded up
+ long long total_diff_in_nano = (diff_in_second * NANOS_PER_SECOND) + diff_in_nano;
+ int diff_in_milli = (int) ((total_diff_in_nano + NANOS_PER_MILLI - 1) / NANOS_PER_MILLI);
+
+ if (diff_in_milli > pSles->player_max_buffer_period) {
+ pSles->player_max_buffer_period = diff_in_milli;
+ }
+
+ // from 0 ms to 1000 ms, plus a sum of all instances > 1000ms
+ if (diff_in_milli >= (RANGE - 1)) {
+ (pSles->player_buffer_period)[RANGE-1]++;
+ } else if (diff_in_milli >= 0) {
+ (pSles->player_buffer_period)[diff_in_milli]++;
+ } else { // for diff_in_milli < 0
+ __android_log_print(ANDROID_LOG_INFO, "sles_player", "Having negative BufferPeriod.");
+ }
+ }
+
+ pSles->player_previous_time_sec = pSles->player_current_time_sec;
+ pSles->player_previous_time_nsec = pSles->player_current_time_nsec;
+}
+
+
+int slesCreateServer(sles_data *pSles, int samplingRate, int frameCount, int micSource,
+ int testType, double frequency1, char* byteBufferPtr, int byteBufferLength) {
int status = SLES_FAIL;
if (pSles != NULL) {
@@ -269,7 +434,7 @@ int slesCreateServer(sles_data *pSles, int samplingRate, int frameCount, int mic
pSles->freeRear; // next to be freed
pSles->fifo; //(*)
- pSles->fifo2Buffer = NULL;
+ pSles->fifo2Buffer = NULL; //this fifo is for sending data to java code (to plot it)
pSles->recorderBufferQueue;
pSles->playerBufferQueue;
@@ -281,7 +446,7 @@ int slesCreateServer(sles_data *pSles, int samplingRate, int frameCount, int mic
pSles->bufSizeInBytes = pSles->channels * pSles->bufSizeInFrames * sizeof(short);
// Initialize free buffers
- pSles->freeBuffers = (char **) calloc(pSles->freeBufCount+1, sizeof(char *));
+ pSles->freeBuffers = (char **) calloc(pSles->freeBufCount + 1, sizeof(char *));
unsigned j;
for (j = 0; j < pSles->freeBufCount; ++j) {
pSles->freeBuffers[j] = (char *) malloc(pSles->bufSizeInBytes);
@@ -291,12 +456,12 @@ int slesCreateServer(sles_data *pSles, int samplingRate, int frameCount, int mic
pSles->freeBuffers[j] = NULL;
// Initialize record queue
- pSles->rxBuffers = (char **) calloc(pSles->rxBufCount+1, sizeof(char *));
+ pSles->rxBuffers = (char **) calloc(pSles->rxBufCount + 1, sizeof(char *));
pSles->rxFront = 0;
pSles->rxRear = 0;
// Initialize play queue
- pSles->txBuffers = (char **) calloc(pSles->txBufCount+1, sizeof(char *));
+ pSles->txBuffers = (char **) calloc(pSles->txBufCount + 1, sizeof(char *));
pSles->txFront = 0;
pSles->txRear = 0;
@@ -325,6 +490,32 @@ int slesCreateServer(sles_data *pSles, int samplingRate, int frameCount, int mic
// sndfile = NULL;
// }
+ //init recorder buffer period data
+ pSles->recorder_buffer_period = new int[RANGE](); // initialized to zeros
+ pSles->recorder_previous_time_sec = 0;
+ pSles->recorder_previous_time_nsec = 0;
+ pSles->recorder_current_time_sec = 0;
+ pSles->recorder_current_time_nsec = 0;
+ pSles->recorder_buffer_count = 0;
+ pSles->recorder_max_buffer_period = 0;
+
+ //init player buffer period data
+ pSles->player_buffer_period = new int[RANGE](); // initialized to zeros
+ pSles->player_previous_time_sec = 0;
+ pSles->player_previous_time_nsec = 0;
+ pSles->player_current_time_sec = 0;
+ pSles->player_current_time_nsec = 0;
+ pSles->player_buffer_count = 0;
+ pSles->player_max_buffer_period = 0;
+
+ // init other variables needed for buffer test
+ pSles->testType = testType;
+ pSles->frequency1 = frequency1;
+ pSles->bufferTestPhase1 = 0;
+ pSles->count = 0;
+ pSles->byteBufferPtr = byteBufferPtr;
+ pSles->byteBufferLength = byteBufferLength;
+
SLresult result;
// create engine
@@ -392,7 +583,7 @@ int slesCreateServer(sles_data *pSles, int samplingRate, int frameCount, int mic
&(pSles->playerBufferQueue));
ASSERT_EQ(SL_RESULT_SUCCESS, result);
result = (*(pSles->playerBufferQueue))->RegisterCallback(pSles->playerBufferQueue,
- playerCallback, pSles);
+ playerCallback, pSles); //playerCallback is the name of callback function
ASSERT_EQ(SL_RESULT_SUCCESS, result);
// Enqueue some zero buffers for the player
@@ -423,21 +614,26 @@ int slesCreateServer(sles_data *pSles, int samplingRate, int frameCount, int mic
// Create an audio recorder with microphone device source and buffer queue sink.
// The buffer queue as sink is an Android-specific extension.
-
SLDataLocator_IODevice locator_iodevice;
SLDataLocator_AndroidSimpleBufferQueue locator_bufferqueue_rx;
+
locator_iodevice.locatorType = SL_DATALOCATOR_IODEVICE;
locator_iodevice.deviceType = SL_IODEVICE_AUDIOINPUT;
locator_iodevice.deviceID = SL_DEFAULTDEVICEID_AUDIOINPUT;
locator_iodevice.device = NULL;
+
audiosrc.pLocator = &locator_iodevice;
audiosrc.pFormat = NULL;
+
locator_bufferqueue_rx.locatorType = SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE;
locator_bufferqueue_rx.numBuffers = pSles->rxBufCount;
+
audiosnk.pLocator = &locator_bufferqueue_rx;
audiosnk.pFormat = &pcm;
- {
- SLInterfaceID ids_rx[2] = {SL_IID_ANDROIDSIMPLEBUFFERQUEUE, SL_IID_ANDROIDCONFIGURATION};
+
+ { //why brackets here?
+ SLInterfaceID ids_rx[2] = {SL_IID_ANDROIDSIMPLEBUFFERQUEUE,
+ SL_IID_ANDROIDCONFIGURATION};
SLboolean flags_rx[2] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
result = (*engineEngine)->CreateAudioRecorder(engineEngine, &(pSles->recorderObject),
&audiosrc, &audiosnk, 2, ids_rx, flags_rx);
@@ -447,7 +643,7 @@ int slesCreateServer(sles_data *pSles, int samplingRate, int frameCount, int mic
status = SLES_FAIL;
SLES_PRINTF("ERROR: Could not create audio recorder (result %x), "
- "check sample rate and channel count\n", result);
+ "check sample rate and channel count\n", result);
goto cleanup;
}
}
@@ -456,10 +652,12 @@ int slesCreateServer(sles_data *pSles, int samplingRate, int frameCount, int mic
{
/* Get the Android configuration interface which is explicit */
SLAndroidConfigurationItf configItf;
- result = (*(pSles->recorderObject))->GetInterface(pSles->recorderObject, SL_IID_ANDROIDCONFIGURATION, (void*)&configItf);
+ result = (*(pSles->recorderObject))->GetInterface(pSles->recorderObject,
+ SL_IID_ANDROIDCONFIGURATION, (void*)&configItf);
ASSERT_EQ(SL_RESULT_SUCCESS, result);
- SLuint32 presetValue = micSource;//SL_ANDROID_RECORDING_PRESET_CAMCORDER;//SL_ANDROID_RECORDING_PRESET_NONE;
+ SLuint32 presetValue = micSource;
+ //SL_ANDROID_RECORDING_PRESET_CAMCORDER;//SL_ANDROID_RECORDING_PRESET_NONE;
/* Use the configuration interface to configure the recorder before it's realized */
if (presetValue != SL_ANDROID_RECORDING_PRESET_NONE) {
@@ -472,13 +670,16 @@ int slesCreateServer(sles_data *pSles, int samplingRate, int frameCount, int mic
result = (*(pSles->recorderObject))->Realize(pSles->recorderObject, SL_BOOLEAN_FALSE);
ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
SLRecordItf recorderRecord;
result = (*(pSles->recorderObject))->GetInterface(pSles->recorderObject, SL_IID_RECORD,
&recorderRecord);
ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
result = (*(pSles->recorderObject))->GetInterface(pSles->recorderObject,
SL_IID_ANDROIDSIMPLEBUFFERQUEUE, &(pSles->recorderBufferQueue));
ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
result = (*(pSles->recorderBufferQueue))->RegisterCallback(pSles->recorderBufferQueue,
recorderCallback, pSles);
ASSERT_EQ(SL_RESULT_SUCCESS, result);
@@ -518,16 +719,18 @@ int slesCreateServer(sles_data *pSles, int samplingRate, int frameCount, int mic
SLES_PRINTF("Finished initialization with status: %d", status);
- int xx =1;
+ int xx = 1;
}
return status;
}
+// Read data from fifo2Buffer and store into pSamples.
int slesProcessNext(sles_data *pSles, double *pSamples, long maxSamples) {
//int status = SLES_FAIL;
- SLES_PRINTF("slesProcessNext: pSles = %p, currentSample: %p, maxSamples = %d", pSles, pSamples, maxSamples);
+ SLES_PRINTF("slesProcessNext: pSles = %p, currentSample: %p, maxSamples = %d",
+ pSles, pSamples, maxSamples);
int samplesRead = 0;
@@ -539,7 +742,7 @@ int slesProcessNext(sles_data *pSles, double *pSamples, long maxSamples) {
SLresult result;
for (int i = 0; i < 10; i++) {
- usleep(100000);
+ usleep(100000); // sleep for 0.1s
if (pSles->fifo2Buffer != NULL) {
for (;;) {
short buffer[pSles->bufSizeInFrames * pSles->channels];
@@ -548,12 +751,12 @@ int slesProcessNext(sles_data *pSles, double *pSamples, long maxSamples) {
if (actual <= 0)
break;
{
- for (int jj =0; jj<actual && currentSample < maxSamples; jj++) {
- *(pCurrentSample++) = ((double)buffer[jj])/maxValue;
+ for (int jj = 0; jj < actual && currentSample < maxSamples; jj++) {
+ *(pCurrentSample++) = ((double) buffer[jj]) / maxValue;
currentSample++;
}
}
- samplesRead +=actual;
+ samplesRead += actual;
}
}
if (pSles->injectImpulse > 0) {
@@ -572,32 +775,31 @@ int slesProcessNext(sles_data *pSles, double *pSamples, long maxSamples) {
}
SLBufferQueueState playerBQState;
result = (*(pSles->playerBufferQueue))->GetState(pSles->playerBufferQueue,
- &playerBQState);
+ &playerBQState);
ASSERT_EQ(SL_RESULT_SUCCESS, result);
SLAndroidSimpleBufferQueueState recorderBQState;
result = (*(pSles->recorderBufferQueue))->GetState(pSles->recorderBufferQueue,
- &recorderBQState);
+ &recorderBQState);
ASSERT_EQ(SL_RESULT_SUCCESS, result);
- SLES_PRINTF("End of slesProcessNext: pSles = %p, samplesRead = %d, maxSamples= %d", pSles, samplesRead, maxSamples);
+ SLES_PRINTF("End of slesProcessNext: pSles = %p, samplesRead = %d, maxSamples = %d",
+ pSles, samplesRead, maxSamples);
}
return samplesRead;
}
+
+
int slesDestroyServer(sles_data *pSles) {
int status = SLES_FAIL;
SLES_PRINTF("Start slesDestroyServer: pSles = %p", pSles);
if (pSles != NULL) {
-
-
-
if (NULL != pSles->playerObject) {
-
SLES_PRINTF("stopping player...");
SLPlayItf playerPlay;
- SLresult result = (*(pSles->playerObject))->GetInterface(pSles->playerObject, SL_IID_PLAY,
- &playerPlay);
+ SLresult result = (*(pSles->playerObject))->GetInterface(pSles->playerObject,
+ SL_IID_PLAY, &playerPlay);
ASSERT_EQ(SL_RESULT_SUCCESS, result);
@@ -607,22 +809,18 @@ int slesDestroyServer(sles_data *pSles) {
}
if (NULL != pSles->recorderObject) {
-
-
SLES_PRINTF("stopping recorder...");
SLRecordItf recorderRecord;
- SLresult result = (*(pSles->recorderObject))->GetInterface(pSles->recorderObject, SL_IID_RECORD,
- &recorderRecord);
+ SLresult result = (*(pSles->recorderObject))->GetInterface(pSles->recorderObject,
+ SL_IID_RECORD, &recorderRecord);
ASSERT_EQ(SL_RESULT_SUCCESS, result);
-
result = (*recorderRecord)->SetRecordState(recorderRecord, SL_RECORDSTATE_STOPPED);
ASSERT_EQ(SL_RESULT_SUCCESS, result);
}
usleep(1000);
-
audio_utils_fifo_deinit(&(pSles->fifo));
delete[] pSles->fifoBuffer;
@@ -654,7 +852,7 @@ int slesDestroyServer(sles_data *pSles) {
SLES_PRINTF("slesDestroyServer 7");
// free(pSles);
-// pSles=NULL;
+// pSles = NULL;
status = SLES_SUCCESS;
}
@@ -662,3 +860,22 @@ int slesDestroyServer(sles_data *pSles) {
return status;
}
+
+int* slesGetRecorderBufferPeriod(sles_data *pSles) {
+ return pSles->recorder_buffer_period;
+}
+
+
+int slesGetRecorderMaxBufferPeriod(sles_data *pSles) {
+ return pSles->recorder_max_buffer_period;
+}
+
+
+int* slesGetPlayerBufferPeriod(sles_data *pSles) {
+ return pSles->player_buffer_period;
+}
+
+
+int slesGetPlayerMaxBufferPeriod(sles_data *pSles) {
+ return pSles->player_max_buffer_period;
+} \ No newline at end of file
diff --git a/LoopbackApp/app/src/main/jni/sles.h b/LoopbackApp/app/src/main/jni/sles.h
index defd1d7..57690f1 100644
--- a/LoopbackApp/app/src/main/jni/sles.h
+++ b/LoopbackApp/app/src/main/jni/sles.h
@@ -19,6 +19,7 @@
#include <pthread.h>
#include <android/log.h>
+
#ifndef _Included_org_drrickorang_loopback_sles
#define _Included_org_drrickorang_loopback_sles
@@ -31,6 +32,7 @@ extern "C" {
#endif
#include <audio_utils/fifo.h>
+//TODO fix this
typedef struct {
SLuint32 rxBufCount; // -r#
SLuint32 txBufCount; // -t#
@@ -69,24 +71,65 @@ typedef struct {
SLObjectItf recorderObject;
SLObjectItf outputmixObject;
SLObjectItf engineObject;
+
+ int* recorder_buffer_period;
+ int recorder_previous_time_sec;
+ int recorder_previous_time_nsec;
+ int recorder_current_time_sec;
+ int recorder_current_time_nsec;
+ int recorder_buffer_count;
+ int recorder_max_buffer_period;
+
+ int* player_buffer_period;
+ time_t player_previous_time_sec;
+ long player_previous_time_nsec;
+ time_t player_current_time_sec;
+ long player_current_time_nsec;
+ int player_buffer_count;
+ int player_max_buffer_period;
+
+ int testType;
+ double frequency1;
+ double bufferTestPhase1;
+ int count;
+ char* byteBufferPtr;
+ int byteBufferLength;
} sles_data;
enum {
SLES_SUCCESS = 0,
SLES_FAIL = 1,
+ NANOS_PER_MILLI = 1000000,
+ NANOS_PER_SECOND = 1000000000,
+ RANGE = 1002,
+ BUFFER_PERIOD_DISCARD = 10,
+ TEST_TYPE_LATENCY = 222,
+ TEST_TYPE_BUFFER_PERIOD = 223
} SLES_STATUS_ENUM;
-int slesInit( sles_data ** ppSles, int samplingRate, int frameCount, int micSource);
+int slesInit(sles_data ** ppSles, int samplingRate, int frameCount, int micSource,
+ int testType, double frequency1, char* byteBufferPtr, int byteBufferLength);
+
//note the double pointer to properly free the memory of the structure
-int slesDestroy( sles_data ** ppSles);
+int slesDestroy(sles_data ** ppSles);
///full
int slesFull(sles_data *pSles);
-int slesCreateServer(sles_data *pSles, int samplingRate, int frameCount, int micSource);
+int slesCreateServer(sles_data *pSles, int samplingRate, int frameCount, int micSource,
+ int testType, double frequency1, char* byteBufferPtr, int byteBufferLength);
int slesProcessNext(sles_data *pSles, double *pSamples, long maxSamples);
int slesDestroyServer(sles_data *pSles);
+int* slesGetRecorderBufferPeriod(sles_data *pSles);
+int slesGetRecorderMaxBufferPeriod(sles_data *pSles);
+int* slesGetPlayerBufferPeriod(sles_data *pSles);
+int slesGetPlayerMaxBufferPeriod(sles_data *pSles);
+
+void collectPlayerBufferPeriod(sles_data *pSles);
+void collectRecorderBufferPeriod(sles_data *pSles);
+
+ssize_t byteBuffer_write(sles_data *pSles, char *buffer, size_t count);
#ifdef __cplusplus
}
diff --git a/LoopbackApp/app/src/main/res/layout/about_activity.xml b/LoopbackApp/app/src/main/res/layout/about_activity.xml
new file mode 100644
index 0000000..1c17cf7
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/layout/about_activity.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:orientation="vertical"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ android:background="#FFFFFF">
+
+ <TextView
+ android:id="@+id/AboutInfo"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ android:text="@string/AboutInfo"
+ android:textSize="15sp"
+ android:autoLink="all" />
+
+</LinearLayout> \ No newline at end of file
diff --git a/LoopbackApp/app/src/main/res/layout/glitches_activity.xml b/LoopbackApp/app/src/main/res/layout/glitches_activity.xml
new file mode 100644
index 0000000..f69d7cb
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/layout/glitches_activity.xml
@@ -0,0 +1,36 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:orientation="vertical"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ android:background="#FFFFFF">
+
+ <ScrollView
+ android:id="@+id/GlitchesScroll"
+ android:layout_width="match_parent"
+ android:layout_height="wrap_content"
+ android:scrollbars="vertical"
+ android:fillViewport="true">
+ <TextView
+ android:id="@+id/GlitchesInfo"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ android:textSize="15sp" />
+ </ScrollView>
+
+</LinearLayout> \ No newline at end of file
diff --git a/LoopbackApp/app/src/main/res/layout/main_activity.xml b/LoopbackApp/app/src/main/res/layout/main_activity.xml
index 89f2f4e..826e670 100644
--- a/LoopbackApp/app/src/main/res/layout/main_activity.xml
+++ b/LoopbackApp/app/src/main/res/layout/main_activity.xml
@@ -22,38 +22,50 @@
android:layout_height="match_parent"
android:orientation="vertical"
android:background="#FFFFFF">
-
- <LinearLayout
- xmlns:android="http://schemas.android.com/apk/res/android"
- xmlns:tools="http://schemas.android.com/tools"
- android:layout_width="match_parent"
- android:layout_height="wrap_content"
- android:orientation="horizontal">
-
- <Button
- xmlns:android="http://schemas.android.com/apk/res/android"
- android:id="@+id/buttonTest"
- android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:text="@string/buttonTest_enabled"
- android:onClick="onButtonTest"/>
-
- <Button
- xmlns:android="http://schemas.android.com/apk/res/android"
- android:id="@+id/buttonSave"
- android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:text="@string/buttonSave"
- android:onClick="onButtonSave"/>
-
- <Button
+ <HorizontalScrollView
+ android:id="@+id/ScrollView1"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content">
+ <LinearLayout
xmlns:android="http://schemas.android.com/apk/res/android"
- android:id="@+id/buttonSettings"
- android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:text="@string/buttonSettings"
- android:onClick="onButtonSettings"/>
- </LinearLayout>
+ xmlns:tools="http://schemas.android.com/tools"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ android:orientation="horizontal">
+
+ <Button
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ android:id="@+id/buttonTest"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/buttonTest_enabled"
+ android:onClick="onButtonLatencyTest"/>
+
+ <Button
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ android:id="@+id/buttonStopTest"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/buttonStopTest"
+ android:onClick="onButtonStopTest"/>
+
+ <Button
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ android:id="@+id/buttonSave"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/buttonSave"
+ android:onClick="onButtonSave"/>
+
+ <Button
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ android:id="@+id/buttonSettings"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/buttonSettings"
+ android:onClick="onButtonSettings"/>
+ </LinearLayout>
+ </HorizontalScrollView>
<TextView
android:id="@+id/textInfo"
@@ -62,69 +74,128 @@
android:textColor="#000000"
android:text="@string/labelInfo"/>
- <LinearLayout
- xmlns:android="http://schemas.android.com/apk/res/android"
- xmlns:tools="http://schemas.android.com/tools"
- android:layout_width="match_parent"
- android:layout_height="wrap_content"
- android:orientation="horizontal">
-
- <Button
- xmlns:android="http://schemas.android.com/apk/res/android"
- android:id="@+id/buttonZoomIn"
- android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:text="@string/buttonZoomIn"
- android:onClick="onButtonZoomIn"/>
-
- <Button
- xmlns:android="http://schemas.android.com/apk/res/android"
- android:id="@+id/buttonZoomOut"
- android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:text="@string/buttonZoomOut"
- android:onClick="onButtonZoomOut"/>
-
- <Button
+ <HorizontalScrollView
+ android:id="@+id/ScrollView2"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content">
+ <LinearLayout
xmlns:android="http://schemas.android.com/apk/res/android"
- android:id="@+id/buttonZoomOutFull"
- android:layout_width="wrap_content"
+ xmlns:tools="http://schemas.android.com/tools"
+ android:layout_width="match_parent"
android:layout_height="wrap_content"
- android:text="@string/buttonZoomOutFull"
- android:onClick="onButtonZoomOutFull"/>
- <Button
+ android:orientation="horizontal">
+
+ <Button
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ android:id="@+id/buttonZoomIn"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/buttonZoomIn"
+ android:onClick="onButtonZoomIn"/>
+
+ <Button
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ android:id="@+id/buttonZoomOut"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/buttonZoomOut"
+ android:onClick="onButtonZoomOut"/>
+
+ <Button
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ android:id="@+id/buttonZoomOutFull"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/buttonZoomOutFull"
+ android:onClick="onButtonZoomOutFull"/>
+ <Button
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ android:id="@+id/buttonAbout"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/buttonAbout"
+ android:onClick="onButtonAbout"/>
+ </LinearLayout>
+ </HorizontalScrollView>
+
+ <HorizontalScrollView
+ android:id="@+id/ScrollView3"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content">
+ <LinearLayout
xmlns:android="http://schemas.android.com/apk/res/android"
- android:id="@+id/buttonAbout"
- android:layout_width="wrap_content"
+ xmlns:tools="http://schemas.android.com/tools"
+ android:layout_width="match_parent"
android:layout_height="wrap_content"
- android:text="@string/buttonAbout"
- android:onClick="onButtonAbout"/>
-
- </LinearLayout>
-
- <LinearLayout
- xmlns:android="http://schemas.android.com/apk/res/android"
- xmlns:tools="http://schemas.android.com/tools"
- android:layout_width="match_parent"
- android:layout_height="wrap_content"
- android:orientation="horizontal">
-
-<!--
- <Button
+ android:orientation="horizontal">
+
+ <Button
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ android:id="@+id/buttonBufferTest"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/buttonBufferTest"
+ android:onClick="onButtonBufferTest"/>
+
+
+ <Button
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ android:id="@+id/buttonGlitches"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/buttonGlitches"
+ android:onClick="onButtonGlitches"/>
+ </LinearLayout>
+ </HorizontalScrollView>
+
+ <HorizontalScrollView
+ android:id="@+id/ScrollView4"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content">
+ <LinearLayout
xmlns:android="http://schemas.android.com/apk/res/android"
- android:id="@+id/buttonBufferPeriod"
- android:layout_width="wrap_content"
+ xmlns:tools="http://schemas.android.com/tools"
+ android:layout_width="match_parent"
android:layout_height="wrap_content"
- android:text="@string/buttonBufferPeriod"
- android:onClick="onButtonBufferPeriod"/>
--->
- </LinearLayout>
+ android:orientation="horizontal">
+
+ <TextView
+ android:id="@+id/bufferPeriods"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:textColor="#000000"
+ android:textSize="18sp"
+ android:text="@string/showBufferPeriods"/>
+
+ <Button
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ android:id="@+id/buttonRecorderBufferPeriod"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/buttonRecorderBufferPeriod"
+ android:onClick="onButtonRecorderBufferPeriod"/>
+
+ <Button
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ android:id="@+id/buttonPlayerBufferPeriod"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/buttonPlayerBufferPeriod"
+ android:onClick="onButtonPlayerBufferPeriod"/>
+ </LinearLayout>
+ </HorizontalScrollView>
<LinearLayout
android:orientation="horizontal"
android:layout_width="fill_parent"
android:layout_height="wrap_content">
+ <TextView
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="Current Level"
+ android:id="@+id/textViewCurrentLevel"/>
+
<SeekBar
android:id="@+id/BarMasterLevel"
android:indeterminate="false"
@@ -141,18 +212,9 @@
android:layout_height="wrap_content">
<TextView
- android:layout_width="100dp"
+ android:layout_width="250dp"
android:layout_height="wrap_content"
- android:text="Current Level"
- android:id="@+id/textViewCurrentLevel"/>
-
-<!--
android:text="latency"
--->
- <TextView
- android:layout_width="180dp"
- android:layout_height="wrap_content"
- android:text=""
android:id="@+id/textViewEstimatedLatency"
android:textStyle="bold"/>
</LinearLayout>
diff --git a/LoopbackApp/app/src/main/res/layout/player_buffer_period_activity.xml b/LoopbackApp/app/src/main/res/layout/player_buffer_period_activity.xml
new file mode 100644
index 0000000..285c48e
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/layout/player_buffer_period_activity.xml
@@ -0,0 +1,46 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<LinearLayout
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ xmlns:custom="http://schemas.android.com/apk/res-auto"
+ android:orientation="vertical"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent">
+
+ <TextView
+ android:id="@+id/writeHistogramInfo"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_gravity="center"
+ android:text="@string/WriteHistTitle"/>
+
+ <LinearLayout
+ android:layout_marginTop="0mm"
+ android:layout_width="fill_parent"
+ android:layout_height="wrap_content"
+ android:layout_gravity="center_vertical"
+ android:gravity="center_horizontal"
+ android:orientation="horizontal"
+ android:layout_weight="1">
+ <org.drrickorang.loopback.HistogramView
+ android:id="@+id/viewWriteHistogram"
+ android:layout_width="fill_parent"
+ android:layout_height="fill_parent"
+ android:layout_weight="1" />
+ </LinearLayout>
+
+</LinearLayout> \ No newline at end of file
diff --git a/LoopbackApp/app/src/main/res/layout/buffer_period_activity.xml b/LoopbackApp/app/src/main/res/layout/recorder_buffer_period_activity.xml
index 3c1b000..49b8113 100644
--- a/LoopbackApp/app/src/main/res/layout/buffer_period_activity.xml
+++ b/LoopbackApp/app/src/main/res/layout/recorder_buffer_period_activity.xml
@@ -1,4 +1,19 @@
<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
<LinearLayout
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:custom="http://schemas.android.com/apk/res-auto"
@@ -6,11 +21,11 @@
android:layout_height="match_parent">
<TextView
- android:id="@+id/histogramInfo"
+ android:id="@+id/readHistogramInfo"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center"
- android:text="@string/histTitle"/>
+ android:text="@string/ReadHistTitle"/>
<LinearLayout
android:layout_marginTop="0mm"
@@ -21,11 +36,10 @@
android:orientation="horizontal"
android:layout_weight="1">
<org.drrickorang.loopback.HistogramView
- android:id="@+id/viewHistogram"
+ android:id="@+id/viewReadHistogram"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
- android:layout_weight="1"
- android:text="latency"/>
+ android:layout_weight="1" />
</LinearLayout>
</LinearLayout>
diff --git a/LoopbackApp/app/src/main/res/layout/settings_activity.xml b/LoopbackApp/app/src/main/res/layout/settings_activity.xml
index 8aa61ca..e5ed94d 100644
--- a/LoopbackApp/app/src/main/res/layout/settings_activity.xml
+++ b/LoopbackApp/app/src/main/res/layout/settings_activity.xml
@@ -43,7 +43,7 @@
android:background="@android:color/darker_gray"/>
<TextView
- android:id="@+id/text6"
+ android:id="@+id/textMicSource"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/labelMicSource"/>
@@ -58,7 +58,7 @@
android:background="@android:color/darker_gray"/>
<TextView
- android:id="@+id/text3"
+ android:id="@+id/textAudioThreadType"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/labelAudioThreadType"/>
@@ -74,14 +74,14 @@
<Button
xmlns:android="http://schemas.android.com/apk/res/android"
- android:id="@+id/buttonPlaybackDefault"
+ android:id="@+id/buttonDefaultSettings"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
- android:text="@string/buttonPlaybackDefault"
+ android:text="@string/buttonDefaultSettings"
android:onClick="onButtonClick"/>
<TextView
- android:id="@+id/text2"
+ android:id="@+id/textSamplingRate"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/labelSamplingRate"/>
@@ -97,12 +97,12 @@
android:background="@android:color/darker_gray"/>
<TextView
- android:id="@+id/text4"
+ android:id="@+id/textPlayerBuffer"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
- android:text="@string/labelPlaybackBuffer"/>
+ android:text="@string/labelPlayerBuffer"/>
<NumberPicker
- android:id="@+id/numberpickerPlaybackBuffer"
+ android:id="@+id/numberpickerPlayerBuffer"
android:layout_width="wrap_content"
android:layout_height="wrap_content"/>
@@ -112,13 +112,47 @@
android:background="@android:color/darker_gray"/>
<TextView
- android:id="@+id/text5"
+ android:id="@+id/textRecorderBuffer"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
- android:text="@string/labelRecordBuffer"/>
+ android:text="@string/labelRecorderBuffer"/>
<NumberPicker
- android:id="@+id/numberpickerRecordBuffer"
+ android:id="@+id/numberpickerRecorderBuffer"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"/>
+
+
+ <View
+ android:layout_width="fill_parent"
+ android:layout_height="1dp"
+ android:background="@android:color/darker_gray"/>
+
+ <TextView
+ android:id="@+id/textBufferTestDuration"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/labelBufferTestDuration"/>
+
+ <NumberPicker
+ android:id="@+id/numberpickerBufferTestDuration"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"/>
+
+
+ <View
+ android:layout_width="fill_parent"
+ android:layout_height="1dp"
+ android:background="@android:color/darker_gray"/>
+
+ <TextView
+ android:id="@+id/textBufferTestWavePlotDuration"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/labelBufferTestWavePlotDuration"/>
+
+ <NumberPicker
+ android:id="@+id/numberPickerBufferTestWavePlotDuration"
android:layout_width="wrap_content"
android:layout_height="wrap_content"/>
diff --git a/LoopbackApp/app/src/main/res/values/strings.xml b/LoopbackApp/app/src/main/res/values/strings.xml
index 7814d4d..0840188 100644
--- a/LoopbackApp/app/src/main/res/values/strings.xml
+++ b/LoopbackApp/app/src/main/res/values/strings.xml
@@ -20,15 +20,21 @@
<string name="buttonPlay_play">Refresh Screen</string>
<string name="buttonPlay_pause">Pause</string>
- <string name="buttonTest_enabled">Start test</string>
+ <string name="buttonTest_enabled">Latency Test</string>
+ <string name="buttonStopTest">Stop Test</string>
<string name="buttonTest_disabled">FX Disabled Loopback 2</string>
- <string name="buttonSave">Save Wavefile</string>
+ <string name="buttonSave">Save Results</string>
<string name="buttonZoomOutFull">Unzoom</string>
<string name="buttonZoomOut">Zoom Out</string>
<string name="buttonZoomIn"> Zoom In</string>
<string name="buttonAbout">About</string>
- <string name="buttonBufferPeriod">Buffer Period</string>
- <string name="histTitle">Frequency vs. Buffer Period (ms) Plot</string>
+ <string name="buttonRecorderBufferPeriod">Recorder</string>
+ <string name="buttonPlayerBufferPeriod">Player</string>
+ <string name="ReadHistTitle">Frequency vs. Recorder Buffer Period (ms) Plot</string>
+ <string name="WriteHistTitle">Frequency vs. Player Buffer Period (ms) Plot</string>
+ <string name="buttonBufferTest">Detect Glitch</string>
+ <string name="buttonGlitches">Show Glitches</string>
+ <string name="showBufferPeriods">Show Buffer Periods: </string>
<!-- disabled -->
<string name="buttonZoomInFull">In Full</string>
@@ -43,12 +49,44 @@
<item>CAMCORDER</item>
<item>VOICE_RECOGNITION</item>
<item>VOICE_COMMUNICATION</item>
+ <item>REMOTE_SUBMIX_java_only</item>
</string-array>
<string name="labelInfo">Info...</string>
<string name="labelSettings">SETTINGS</string>
<string name="labelAbout">About</string>
<string name="labelSamplingRate">Sampling Rate</string>
+ <string name="AboutInfo">Round-trip audio latency testing app\n
+ using the Dr. Rick O\'Rang audio loopback dongle.\n
+ Authors: Ricardo Garcia (rago) and Tzu-Yin Tai\n
+ Open source project on:\n
+ https://github.com/gkasten/drrickorang\n
+ References:\n
+ https://source.android.com/devices/audio/latency.html\n
+ https://goo.gl/dxcw0d\n\n\n
+ adb parameters: all parameters are optional. If not specified, defaults will be used.\n
+ -ei SF \t\t\t\t\t\t\t\t\t\t\t\t ####\t\t sampling frequency \n
+ -es Filename \t\t\t\t\t\t\t ssss\t\t\t output filename \n
+ -ei MicSource \t\t\t\t\t\t\t ####\t\t microphone source\n
+ \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 0: DEFAULT\n
+ \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 1: MIC\n
+ \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 2: CAMCORDER\n
+ \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 3: VOICE_RECOGNITION\n
+ \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 4: VOICE_COMMUNICATION\n
+ -ei AudioThread \t\t\t\t\t ####\t\t Audio Thread Type\n
+ \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 0: Java\n
+ \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 1: Native (JNI)\n
+ -ei AudioLevel \t\t\t\t\t\t ####\t\t Audio Level [0:15]\n
+ -ei RecorderBuffer \t\t\t\t ####\t\t Recorder Buffer Frames\n
+ -ei PlayerBuffer \t\t\t\t\t\t ####\t\t Player Buffer Frames\n
+ -ei TestType \t\t\t\t\t\t\t\t ####\t\t Audio Test Type\n
+ \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 222: Latency Test\n
+ \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 223: Buffer Test\n
+ -ei BufferTestDuration \t ####\t\t Buffer Test Duration \n\n\n
+ Example: adb shell am start -n org.drrickorang.loopback/.LoopbackActivity
+ --ei SF 48000 --es FileName output --ei MicSource 3 --ei AudioThread 1 --ei AudioLevel 12
+ -ei TestType 223 --ei BufferTestDuration 5
+ </string>
<!-- spinnerSamplingRate Options -->
<string-array name="samplingRate_array">
@@ -67,10 +105,13 @@
<item>native (JNI)</item>
</string-array>
- <string name="labelPlaybackBuffer">Playback Buffer (Frames)</string>
- <string name="labelRecordBuffer">Record Buffer (Frames)</string>
- <string name="buttonPlaybackDefault">Compute Default Settings</string>
- <string name="buttonRecordDefault">System Default Record Buffer</string>
+ <string name="labelPlayerBuffer">Player Buffer (Frames) (Max: %1$d)</string>
+ <string name="labelRecorderBuffer">Recorder Buffer (Frames) (Max: %1$d)</string>
+ <string name="buttonDefaultSettings">Compute Default Settings</string>
+ <string name="buttonRecordDefault">System Default Recorder Buffer</string>
+ <string name="labelBufferTestDuration">Buffer Test Duration (Seconds) (Max: %1$d)</string>
+ <string name="labelBufferTestWavePlotDuration">Buffer Test Wave Plot Duration (Seconds)
+ (Max: %1$d)</string>
</resources>
diff --git a/LoopbackApp/build/intermediates/gradle_project_sync_data.bin b/LoopbackApp/build/intermediates/gradle_project_sync_data.bin
new file mode 100644
index 0000000..5918b8d
--- /dev/null
+++ b/LoopbackApp/build/intermediates/gradle_project_sync_data.bin
Binary files differ
diff --git a/LoopbackApp/local.properties b/LoopbackApp/local.properties
new file mode 100644
index 0000000..a8988ab
--- /dev/null
+++ b/LoopbackApp/local.properties
@@ -0,0 +1,12 @@
+## This file is automatically generated by Android Studio.
+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
+#
+# This file must *NOT* be checked into Version Control Systems,
+# as it contains information specific to your local configuration.
+#
+# Location of the SDK. This is only used by Gradle.
+# For customization when using a Version Control System, please read the
+# header note.
+#Thu Aug 06 10:05:10 PDT 2015
+sdk.dir=/usr/local/google/home/gkasten/Tools/adt-bundle-linux-x86_64-20140321/sdk
+ndk.dir=/usr/local/google/home/gkasten/Tools/android-ndk-r10d/ \ No newline at end of file