Diffstat (limited to 'webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java')
-rw-r--r--  webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java  4
1 file changed, 0 insertions(+), 4 deletions(-)
diff --git a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java
index 7b31e08eed..ff77635843 100644
--- a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java
+++ b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java
@@ -192,10 +192,6 @@ class WebRtcAudioRecord {
Math.max(BUFFER_SIZE_FACTOR * minBufferSize, byteBuffer.capacity());
Logging.d(TAG, "bufferSizeInBytes: " + bufferSizeInBytes);
try {
- // TODO(henrika): the only supported audio source for input is currently
- // AudioSource.VOICE_COMMUNICATION. Is there any reason why we should
- // support other types, e.g. DEFAULT or MIC? Only reason I can think of
- // is if the device does not support VOICE_COMMUNICATION.
audioRecord = new AudioRecord(AudioSource.VOICE_COMMUNICATION,
sampleRate,
AudioFormat.CHANNEL_IN_MONO,
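The hunk above only deletes a stale TODO about alternative audio sources; the AudioRecord construction itself is unchanged and still uses AudioSource.VOICE_COMMUNICATION. For context, the sketch below shows the same construction pattern in isolation, plus the hypothetical fallback to AudioSource.MIC that the removed comment speculated about. The class name, BUFFER_SIZE_FACTOR value, and the fallback loop are illustrative assumptions, not behavior of the real WebRtcAudioRecord class.

import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder.AudioSource;

class AudioRecordFactory {
  // Assumed value; mirrors the BUFFER_SIZE_FACTOR constant referenced in the hunk context.
  private static final int BUFFER_SIZE_FACTOR = 2;

  // Tries VOICE_COMMUNICATION first (what the diffed code uses), then falls back
  // to MIC. The fallback is hypothetical -- the real class only uses the former.
  static AudioRecord create(int sampleRate) {
    int minBufferSize = AudioRecord.getMinBufferSize(
        sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    int bufferSizeInBytes = BUFFER_SIZE_FACTOR * minBufferSize;
    for (int source : new int[] {AudioSource.VOICE_COMMUNICATION, AudioSource.MIC}) {
      try {
        AudioRecord record = new AudioRecord(source, sampleRate,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes);
        if (record.getState() == AudioRecord.STATE_INITIALIZED) {
          return record;
        }
        record.release();
      } catch (IllegalArgumentException e) {
        // The constructor rejects unsupported parameter combinations; try the next source.
      }
    }
    return null; // No usable audio source on this device.
  }
}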