author     Justin Klaassen <justinklaassen@google.com>  2017-10-10 15:20:13 -0400
committer  Justin Klaassen <justinklaassen@google.com>  2017-10-10 15:20:13 -0400
commit     93b7ee4fce01df52a6607f0b1965cbafdfeaf1a6 (patch)
tree       49f76f879a89c256a4f65b674086be50760bdffb /android/media
parent     bc81c7ada5aab3806dd0b17498f5c9672c9b33c4 (diff)
download   android-28-93b7ee4fce01df52a6607f0b1965cbafdfeaf1a6.tar.gz
Import Android SDK Platform P [4386628]
/google/data/ro/projects/android/fetch_artifact \
  --bid 4386628 \
  --target sdk_phone_armv7-win_sdk \
  sdk-repo-linux-sources-4386628.zip

AndroidVersion.ApiLevel has been modified to appear as 28

Change-Id: I9b8400ac92116cae4f033d173f7a5682b26ccba9
Diffstat (limited to 'android/media')
-rw-r--r--  android/media/AmrInputStream.java         | 39
-rw-r--r--  android/media/AudioAttributes.java        | 19
-rw-r--r--  android/media/AudioManager.java           | 10
-rw-r--r--  android/media/AudioPortEventHandler.java  | 26
-rw-r--r--  android/media/ExifInterface.java          |  7
-rw-r--r--  android/media/MediaCodecInfo.java         |  4
-rw-r--r--  android/media/MediaRouter.java            | 70
-rw-r--r--  android/media/PlayerBase.java             | 21
8 files changed, 124 insertions, 72 deletions
diff --git a/android/media/AmrInputStream.java b/android/media/AmrInputStream.java
index fb91bbbb..efaf2244 100644
--- a/android/media/AmrInputStream.java
+++ b/android/media/AmrInputStream.java
@@ -25,12 +25,12 @@ import android.util.Log;
/**
- * AmrInputStream
+ * DO NOT USE
* @hide
*/
public final class AmrInputStream extends InputStream {
private final static String TAG = "AmrInputStream";
-
+
// frame is 20 msec at 8.000 khz
private final static int SAMPLES_PER_FRAME = 8000 * 20 / 1000;
@@ -51,10 +51,10 @@ public final class AmrInputStream extends InputStream {
private byte[] mOneByte = new byte[1];
/**
- * Create a new AmrInputStream, which converts 16 bit PCM to AMR
- * @param inputStream InputStream containing 16 bit PCM.
+ * DO NOT USE - use MediaCodec instead
*/
public AmrInputStream(InputStream inputStream) {
+ Log.w(TAG, "@@@@ AmrInputStream is not a public API @@@@");
mInputStream = inputStream;
MediaFormat format = new MediaFormat();
@@ -83,17 +83,26 @@ public final class AmrInputStream extends InputStream {
mInfo = new BufferInfo();
}
+ /**
+ * DO NOT USE
+ */
@Override
public int read() throws IOException {
int rtn = read(mOneByte, 0, 1);
return rtn == 1 ? (0xff & mOneByte[0]) : -1;
}
+ /**
+ * DO NOT USE
+ */
@Override
public int read(byte[] b) throws IOException {
return read(b, 0, b.length);
}
+ /**
+ * DO NOT USE
+ */
@Override
public int read(byte[] b, int offset, int length) throws IOException {
if (mCodec == null) {
@@ -131,19 +140,15 @@ public final class AmrInputStream extends InputStream {
}
}
- // now read encoded data from the encoder (blocking, since we just filled up the
- // encoder's input with data it should be able to output at least one buffer)
- while (true) {
- int index = mCodec.dequeueOutputBuffer(mInfo, -1);
- if (index >= 0) {
- mBufIn = mInfo.size;
- ByteBuffer out = mCodec.getOutputBuffer(index);
- out.get(mBuf, 0 /* offset */, mBufIn /* length */);
- mCodec.releaseOutputBuffer(index, false /* render */);
- if ((mInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
- mSawOutputEOS = true;
- }
- break;
+ // now read encoded data from the encoder
+ int index = mCodec.dequeueOutputBuffer(mInfo, 0);
+ if (index >= 0) {
+ mBufIn = mInfo.size;
+ ByteBuffer out = mCodec.getOutputBuffer(index);
+ out.get(mBuf, 0 /* offset */, mBufIn /* length */);
+ mCodec.releaseOutputBuffer(index, false /* render */);
+ if ((mInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+ mSawOutputEOS = true;
}
}
}
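
Editor's note: the AmrInputStream hunks above mark the class as non-public, point callers at MediaCodec, and replace the blocking output dequeue (timeout -1) with a single non-blocking poll (timeout 0). The sketch below shows, under stated assumptions, how 16-bit 8 kHz mono PCM could be encoded to AMR-NB with MediaCodec directly, as the new comment suggests; the class name, bitrate, timeouts, and frame-sized read are illustrative and not taken from the patch.

// Hedged sketch: AMR-NB encoding with MediaCodec instead of AmrInputStream.
// Note: a standalone .amr file additionally needs the "#!AMR\n" magic header.
import android.media.MediaCodec;
import android.media.MediaFormat;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;

final class AmrEncoderSketch {
    static void encode(InputStream pcmIn, OutputStream amrOut) throws IOException {
        MediaFormat format = MediaFormat.createAudioFormat(
                MediaFormat.MIMETYPE_AUDIO_AMR_NB, 8000 /* Hz */, 1 /* channel */);
        format.setInteger(MediaFormat.KEY_BIT_RATE, 12200);
        MediaCodec codec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_AUDIO_AMR_NB);
        codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        codec.start();

        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        byte[] pcm = new byte[2 * 8000 * 20 / 1000];  // one 20 ms frame of 16-bit samples
        boolean inputDone = false;
        boolean outputDone = false;
        while (!outputDone) {
            if (!inputDone) {
                int inIndex = codec.dequeueInputBuffer(10_000 /* us */);
                if (inIndex >= 0) {
                    int n = pcmIn.read(pcm);
                    if (n < 0) {
                        codec.queueInputBuffer(inIndex, 0, 0, 0,
                                MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        inputDone = true;
                    } else {
                        codec.getInputBuffer(inIndex).put(pcm, 0, n);
                        codec.queueInputBuffer(inIndex, 0, n, 0, 0);
                    }
                }
            }
            int outIndex = codec.dequeueOutputBuffer(info, 10_000 /* us */);
            if (outIndex >= 0) {
                ByteBuffer out = codec.getOutputBuffer(outIndex);
                byte[] encoded = new byte[info.size];
                out.get(encoded);
                amrOut.write(encoded);
                codec.releaseOutputBuffer(outIndex, false /* render */);
                outputDone = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
            }
        }
        codec.stop();
        codec.release();
    }
}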
diff --git a/android/media/AudioAttributes.java b/android/media/AudioAttributes.java
index 3b9a5de0..26ead3d1 100644
--- a/android/media/AudioAttributes.java
+++ b/android/media/AudioAttributes.java
@@ -19,12 +19,14 @@ package android.media;
import android.annotation.IntDef;
import android.annotation.NonNull;
import android.annotation.SystemApi;
+import android.media.AudioAttributesProto;
import android.os.Bundle;
import android.os.Parcel;
import android.os.Parcelable;
import android.text.TextUtils;
import android.util.Log;
import android.util.SparseIntArray;
+import android.util.proto.ProtoOutputStream;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
@@ -177,7 +179,7 @@ public final class AudioAttributes implements Parcelable {
/**
* IMPORTANT: when adding new usage types, add them to SDK_USAGES and update SUPPRESSIBLE_USAGES
- * if applicable.
+ * if applicable, as well as audioattributes.proto.
*/
/**
@@ -850,6 +852,21 @@ public final class AudioAttributes implements Parcelable {
}
/** @hide */
+ public void toProto(ProtoOutputStream proto) {
+ proto.write(AudioAttributesProto.USAGE, mUsage);
+ proto.write(AudioAttributesProto.CONTENT_TYPE, mContentType);
+ proto.write(AudioAttributesProto.FLAGS, mFlags);
+ // mFormattedTags is never null due to assignment in Builder or unmarshalling.
+ for (String t : mFormattedTags.split(";")) {
+ t = t.trim();
+ if (t != "") {
+ proto.write(AudioAttributesProto.TAGS, t);
+ }
+ }
+ // TODO: is the data in mBundle useful for debugging?
+ }
+
+ /** @hide */
public String usageToString() {
return usageToString(mUsage);
}
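
Editor's note: the new toProto() filters empty tags with `t != ""`, which in Java compares object identity rather than string content, so it is not a reliable empty-string test. The standalone sketch below shows the same split/trim/skip logic with a content-based check; it is an illustration, not part of the patch, and since AudioAttributesProto and ProtoOutputStream are hidden platform APIs a plain list stands in for the proto sink.

// Standalone illustration only: content-based empty check for the tag filter.
import java.util.ArrayList;
import java.util.List;

final class TagFilterSketch {
    static List<String> nonEmptyTags(String formattedTags) {
        List<String> tags = new ArrayList<>();
        for (String t : formattedTags.split(";")) {
            t = t.trim();
            if (!t.isEmpty()) {  // content check, not the reference comparison `t != ""`
                tags.add(t);
            }
        }
        return tags;
    }
}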
diff --git a/android/media/AudioManager.java b/android/media/AudioManager.java
index 186b2650..dab7632a 100644
--- a/android/media/AudioManager.java
+++ b/android/media/AudioManager.java
@@ -4119,7 +4119,15 @@ public class AudioManager {
Log.w(TAG, "updateAudioPortCache: listAudioPatches failed");
return status;
}
- } while (patchGeneration[0] != portGeneration[0]);
+ // Loop until patch generation is the same as port generation unless audio ports
+ // and audio patches are not null.
+ } while (patchGeneration[0] != portGeneration[0]
+ && (ports == null || patches == null));
+ // If the patch generation doesn't equal port generation, return ERROR here in case
+ // of mismatch between audio ports and audio patches.
+ if (patchGeneration[0] != portGeneration[0]) {
+ return ERROR;
+ }
for (int i = 0; i < newPatches.size(); i++) {
for (int j = 0; j < newPatches.get(i).sources().length; j++) {
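
Editor's note: the updateAudioPortCache() hunk narrows the retry loop: it keeps looping only while no usable snapshot exists, and once both lists are captured a remaining generation mismatch is surfaced as ERROR instead of spinning. Below is a hypothetical distillation of that contract; listPorts()/listPatches() are illustrative stand-ins for the native-backed calls, and the int[1] generation holders mirror the patch.

// Hedged sketch of the consistent-snapshot pattern behind this hunk.
import java.util.List;

final class SnapshotSketch {
    interface Source {
        List<Object> listPorts(int[] generationOut);
        List<Object> listPatches(int[] generationOut);
    }

    static final int SUCCESS = 0;
    static final int ERROR = -1;

    static int refresh(Source source) {
        int[] portGeneration = new int[1];
        int[] patchGeneration = new int[1];
        List<Object> ports;
        List<Object> patches;
        do {
            // Re-read only while there is no usable snapshot yet.
            ports = source.listPorts(portGeneration);
            patches = source.listPatches(patchGeneration);
        } while (patchGeneration[0] != portGeneration[0] && (ports == null || patches == null));
        if (patchGeneration[0] != portGeneration[0]) {
            // Both lists exist but were captured around a concurrent change: report ERROR
            // and let the caller (see AudioPortEventHandler below) reschedule a retry.
            return ERROR;
        }
        // ... rebuild the cached ports/patches from the now-consistent snapshot ...
        return SUCCESS;
    }
}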
diff --git a/android/media/AudioPortEventHandler.java b/android/media/AudioPortEventHandler.java
index c152245d..ac3904a2 100644
--- a/android/media/AudioPortEventHandler.java
+++ b/android/media/AudioPortEventHandler.java
@@ -17,6 +17,7 @@
package android.media;
import android.os.Handler;
+import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import java.util.ArrayList;
@@ -30,6 +31,7 @@ import java.lang.ref.WeakReference;
class AudioPortEventHandler {
private Handler mHandler;
+ private HandlerThread mHandlerThread;
private final ArrayList<AudioManager.OnAudioPortUpdateListener> mListeners =
new ArrayList<AudioManager.OnAudioPortUpdateListener>();
@@ -40,6 +42,8 @@ class AudioPortEventHandler {
private static final int AUDIOPORT_EVENT_SERVICE_DIED = 3;
private static final int AUDIOPORT_EVENT_NEW_LISTENER = 4;
+ private static final long RESCHEDULE_MESSAGE_DELAY_MS = 100;
+
/**
* Accessed by native methods: JNI Callback context.
*/
@@ -51,11 +55,12 @@ class AudioPortEventHandler {
if (mHandler != null) {
return;
}
- // find the looper for our new event handler
- Looper looper = Looper.getMainLooper();
+ // create a new thread for our new event handler
+ mHandlerThread = new HandlerThread(TAG);
+ mHandlerThread.start();
- if (looper != null) {
- mHandler = new Handler(looper) {
+ if (mHandlerThread.getLooper() != null) {
+ mHandler = new Handler(mHandlerThread.getLooper()) {
@Override
public void handleMessage(Message msg) {
ArrayList<AudioManager.OnAudioPortUpdateListener> listeners;
@@ -86,6 +91,12 @@ class AudioPortEventHandler {
if (msg.what != AUDIOPORT_EVENT_SERVICE_DIED) {
int status = AudioManager.updateAudioPortCache(ports, patches, null);
if (status != AudioManager.SUCCESS) {
+ // Since audio ports and audio patches are not null, the return
+ // value could be ERROR due to inconsistency between port generation
+ // and patch generation. In this case, we need to reschedule the
+ // message to make sure the native callback is done.
+ sendMessageDelayed(obtainMessage(msg.what, msg.obj),
+ RESCHEDULE_MESSAGE_DELAY_MS);
return;
}
}
@@ -132,6 +143,9 @@ class AudioPortEventHandler {
@Override
protected void finalize() {
native_finalize();
+ if (mHandlerThread.isAlive()) {
+ mHandlerThread.quit();
+ }
}
private native void native_finalize();
@@ -168,6 +182,10 @@ class AudioPortEventHandler {
Handler handler = eventHandler.handler();
if (handler != null) {
Message m = handler.obtainMessage(what, arg1, arg2, obj);
+ if (what != AUDIOPORT_EVENT_NEW_LISTENER) {
+ // Except AUDIOPORT_EVENT_NEW_LISTENER, we can only respect the last message.
+ handler.removeMessages(what);
+ }
handler.sendMessage(m);
}
}
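
Editor's note: AudioPortEventHandler now runs its callbacks on a dedicated HandlerThread instead of the main looper, coalesces every message type except AUDIOPORT_EVENT_NEW_LISTENER, and reschedules after a failed cache update. Below is a minimal sketch of that HandlerThread lifecycle and coalescing pattern; the class and method names (EventThreadSketch, post(), release()) are illustrative, not part of the patch.

// Hedged sketch: a dedicated looper thread owns the handler; quit() releases it.
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Message;

final class EventThreadSketch {
    private final HandlerThread mThread = new HandlerThread("EventThreadSketch");
    private final Handler mHandler;

    EventThreadSketch() {
        mThread.start();  // must be started before its Looper can be obtained
        mHandler = new Handler(mThread.getLooper()) {
            @Override
            public void handleMessage(Message msg) {
                // Handle events off the main thread, mirroring the patch's intent.
            }
        };
    }

    void post(int what, Object obj, long delayMs) {
        // Keep only the most recent message of this kind, as the patch does for
        // everything except AUDIOPORT_EVENT_NEW_LISTENER.
        mHandler.removeMessages(what);
        mHandler.sendMessageDelayed(mHandler.obtainMessage(what, obj), delayMs);
    }

    void release() {
        mThread.quit();
    }
}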
diff --git a/android/media/ExifInterface.java b/android/media/ExifInterface.java
index 1f5edfa0..ba41a7bd 100644
--- a/android/media/ExifInterface.java
+++ b/android/media/ExifInterface.java
@@ -2584,22 +2584,21 @@ public class ExifInterface {
ExifAttribute.createUShort(Integer.parseInt(height), mExifByteOrder));
}
- // Note that the rotation angle from MediaMetadataRetriever for heif images
- // are CCW, while rotation in ExifInterface orientations are CW.
String rotation = retriever.extractMetadata(
MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION);
if (rotation != null) {
int orientation = ExifInterface.ORIENTATION_NORMAL;
+ // all rotation angles in CW
switch (Integer.parseInt(rotation)) {
case 90:
- orientation = ExifInterface.ORIENTATION_ROTATE_270;
+ orientation = ExifInterface.ORIENTATION_ROTATE_90;
break;
case 180:
orientation = ExifInterface.ORIENTATION_ROTATE_180;
break;
case 270:
- orientation = ExifInterface.ORIENTATION_ROTATE_90;
+ orientation = ExifInterface.ORIENTATION_ROTATE_270;
break;
}
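
Editor's note: the ExifInterface fix treats METADATA_KEY_VIDEO_ROTATION as a clockwise angle, so 90 now maps to ORIENTATION_ROTATE_90 and 270 to ORIENTATION_ROTATE_270. A small helper expressing the corrected mapping; the class and method names are illustrative.

// Hedged sketch: clockwise rotation in degrees to the matching EXIF orientation constant.
import android.media.ExifInterface;

final class OrientationSketch {
    static int orientationForCwRotation(int degrees) {
        switch (degrees) {
            case 90:  return ExifInterface.ORIENTATION_ROTATE_90;
            case 180: return ExifInterface.ORIENTATION_ROTATE_180;
            case 270: return ExifInterface.ORIENTATION_ROTATE_270;
            default:  return ExifInterface.ORIENTATION_NORMAL;
        }
    }
}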
diff --git a/android/media/MediaCodecInfo.java b/android/media/MediaCodecInfo.java
index f85925d8..f41e33f7 100644
--- a/android/media/MediaCodecInfo.java
+++ b/android/media/MediaCodecInfo.java
@@ -2749,8 +2749,8 @@ public final class MediaCodecInfo {
mQualityRange = Utils
.parseIntRange(info.getString("quality-range"), mQualityRange);
}
- if (info.containsKey("feature-bitrate-control")) {
- for (String mode: info.getString("feature-bitrate-control").split(",")) {
+ if (info.containsKey("feature-bitrate-modes")) {
+ for (String mode: info.getString("feature-bitrate-modes").split(",")) {
mBitControl |= parseBitrateMode(mode);
}
}
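
Editor's note: MediaCodecInfo now reads encoder bitrate modes from the "feature-bitrate-modes" key rather than "feature-bitrate-control". On the public API side that data surfaces through EncoderCapabilities.isBitrateModeSupported(); a short sketch of querying it, with the helper name and the CBR/MIME choice picked for illustration (e.g. supportsCbrEncoder("video/avc")).

// Hedged sketch: querying the parsed bitrate-mode data through the public API.
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;

final class BitrateModeQuerySketch {
    static boolean supportsCbrEncoder(String mimeType) {
        MediaCodecList list = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        for (MediaCodecInfo info : list.getCodecInfos()) {
            if (!info.isEncoder()) {
                continue;
            }
            for (String type : info.getSupportedTypes()) {
                if (type.equalsIgnoreCase(mimeType)) {
                    // Reflects what parseBitrateMode() accumulated from "feature-bitrate-modes".
                    return info.getCapabilitiesForType(type)
                            .getEncoderCapabilities()
                            .isBitrateModeSupported(
                                    MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR);
                }
            }
        }
        return false;
    }
}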
diff --git a/android/media/MediaRouter.java b/android/media/MediaRouter.java
index fe427a73..70ab8632 100644
--- a/android/media/MediaRouter.java
+++ b/android/media/MediaRouter.java
@@ -184,13 +184,15 @@ public class MediaRouter {
void updateAudioRoutes(AudioRoutesInfo newRoutes) {
boolean audioRoutesChanged = false;
+ boolean forceUseDefaultRoute = false;
+
if (newRoutes.mainType != mCurAudioRoutesInfo.mainType) {
mCurAudioRoutesInfo.mainType = newRoutes.mainType;
int name;
- if ((newRoutes.mainType&AudioRoutesInfo.MAIN_HEADPHONES) != 0
- || (newRoutes.mainType&AudioRoutesInfo.MAIN_HEADSET) != 0) {
+ if ((newRoutes.mainType & AudioRoutesInfo.MAIN_HEADPHONES) != 0
+ || (newRoutes.mainType & AudioRoutesInfo.MAIN_HEADSET) != 0) {
name = com.android.internal.R.string.default_audio_route_name_headphones;
- } else if ((newRoutes.mainType&AudioRoutesInfo.MAIN_DOCK_SPEAKERS) != 0) {
+ } else if ((newRoutes.mainType & AudioRoutesInfo.MAIN_DOCK_SPEAKERS) != 0) {
name = com.android.internal.R.string.default_audio_route_name_dock_speakers;
} else if ((newRoutes.mainType&AudioRoutesInfo.MAIN_HDMI) != 0) {
name = com.android.internal.R.string.default_audio_route_name_hdmi;
@@ -201,11 +203,16 @@ public class MediaRouter {
}
mDefaultAudioVideo.mNameResId = name;
dispatchRouteChanged(mDefaultAudioVideo);
+
+ if ((newRoutes.mainType & (AudioRoutesInfo.MAIN_HEADSET
+ | AudioRoutesInfo.MAIN_HEADPHONES | AudioRoutesInfo.MAIN_USB)) != 0) {
+ forceUseDefaultRoute = true;
+ }
audioRoutesChanged = true;
}
- final int mainType = mCurAudioRoutesInfo.mainType;
if (!TextUtils.equals(newRoutes.bluetoothName, mCurAudioRoutesInfo.bluetoothName)) {
+ forceUseDefaultRoute = false;
mCurAudioRoutesInfo.bluetoothName = newRoutes.bluetoothName;
if (mCurAudioRoutesInfo.bluetoothName != null) {
if (mBluetoothA2dpRoute == null) {
@@ -231,30 +238,21 @@ public class MediaRouter {
}
if (audioRoutesChanged) {
- selectRouteStatic(ROUTE_TYPE_LIVE_AUDIO, getDefaultSystemAudioRoute(), false);
Log.v(TAG, "Audio routes updated: " + newRoutes + ", a2dp=" + isBluetoothA2dpOn());
+ if (mSelectedRoute == null || mSelectedRoute == mDefaultAudioVideo
+ || mSelectedRoute == mBluetoothA2dpRoute) {
+ if (forceUseDefaultRoute || mBluetoothA2dpRoute == null) {
+ selectRouteStatic(ROUTE_TYPE_LIVE_AUDIO, mDefaultAudioVideo, false);
+ } else {
+ selectRouteStatic(ROUTE_TYPE_LIVE_AUDIO, mBluetoothA2dpRoute, false);
+ }
+ }
}
}
- RouteInfo getDefaultSystemAudioRoute() {
- boolean globalBluetoothA2doOn = false;
- try {
- globalBluetoothA2doOn = mMediaRouterService.isGlobalBluetoothA2doOn();
- } catch (RemoteException ex) {
- Log.e(TAG, "Unable to call isSystemBluetoothA2doOn.", ex);
- }
- return (globalBluetoothA2doOn && mBluetoothA2dpRoute != null)
- ? mBluetoothA2dpRoute : mDefaultAudioVideo;
- }
-
- RouteInfo getCurrentSystemAudioRoute() {
- return (isBluetoothA2dpOn() && mBluetoothA2dpRoute != null)
- ? mBluetoothA2dpRoute : mDefaultAudioVideo;
- }
-
boolean isBluetoothA2dpOn() {
try {
- return mAudioService.isBluetoothA2dpOn();
+ return mBluetoothA2dpRoute != null && mAudioService.isBluetoothA2dpOn();
} catch (RemoteException e) {
Log.e(TAG, "Error querying Bluetooth A2DP state", e);
return false;
@@ -602,13 +600,20 @@ public class MediaRouter {
@Override
public void onRestoreRoute() {
- // Skip restoring route if the selected route is not a system audio route, or
- // MediaRouter is initializing.
- if ((mSelectedRoute != mDefaultAudioVideo && mSelectedRoute != mBluetoothA2dpRoute)
- || mSelectedRoute == null) {
- return;
- }
- mSelectedRoute.select();
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ // Skip restoring route if the selected route is not a system audio route,
+ // MediaRouter is initializing, or mClient was changed.
+ if (Client.this != mClient || mSelectedRoute == null
+ || (mSelectedRoute != mDefaultAudioVideo
+ && mSelectedRoute != mBluetoothA2dpRoute)) {
+ return;
+ }
+ Log.v(TAG, "onRestoreRoute() : route=" + mSelectedRoute);
+ mSelectedRoute.select();
+ }
+ });
}
}
}
@@ -940,10 +945,12 @@ public class MediaRouter {
Log.v(TAG, "Selecting route: " + route);
assert(route != null);
final RouteInfo oldRoute = sStatic.mSelectedRoute;
+ final RouteInfo currentSystemRoute = sStatic.isBluetoothA2dpOn()
+ ? sStatic.mBluetoothA2dpRoute : sStatic.mDefaultAudioVideo;
boolean wasDefaultOrBluetoothRoute = (oldRoute == sStatic.mDefaultAudioVideo
|| oldRoute == sStatic.mBluetoothA2dpRoute);
if (oldRoute == route
- && (!wasDefaultOrBluetoothRoute || route == sStatic.getCurrentSystemAudioRoute())) {
+ && (!wasDefaultOrBluetoothRoute || route == currentSystemRoute)) {
return;
}
if (!route.matchesTypes(types)) {
@@ -1014,8 +1021,7 @@ public class MediaRouter {
static void selectDefaultRouteStatic() {
// TODO: Be smarter about the route types here; this selects for all valid.
- if (sStatic.mSelectedRoute != sStatic.mBluetoothA2dpRoute
- && sStatic.mBluetoothA2dpRoute != null && sStatic.isBluetoothA2dpOn()) {
+ if (sStatic.mSelectedRoute != sStatic.mBluetoothA2dpRoute && sStatic.isBluetoothA2dpOn()) {
selectRouteStatic(ROUTE_TYPE_ANY, sStatic.mBluetoothA2dpRoute, false);
} else {
selectRouteStatic(ROUTE_TYPE_ANY, sStatic.mDefaultAudioVideo, false);
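
Editor's note: the MediaRouter changes re-select the system audio route inline in updateAudioRoutes(), remove getDefaultSystemAudioRoute()/getCurrentSystemAudioRoute(), and make onRestoreRoute() run on the handler with a stale-client check. From an application's point of view these selections surface through MediaRouter callbacks; a minimal sketch of observing them, assuming ordinary Context wiring that is not part of the patch.

// Hedged sketch: observing live-audio route selections from the app side.
import android.content.Context;
import android.media.MediaRouter;
import android.util.Log;

final class RouteObserverSketch {
    private static final String TAG = "RouteObserverSketch";

    static MediaRouter.Callback observe(Context context) {
        MediaRouter router =
                (MediaRouter) context.getSystemService(Context.MEDIA_ROUTER_SERVICE);
        MediaRouter.Callback callback = new MediaRouter.SimpleCallback() {
            @Override
            public void onRouteSelected(MediaRouter r, int type, MediaRouter.RouteInfo route) {
                // Fires when e.g. the default route or the Bluetooth A2DP route is picked
                // by the logic in updateAudioRoutes()/selectRouteStatic().
                Log.v(TAG, "Selected route: " + route.getName());
            }
        };
        router.addCallback(MediaRouter.ROUTE_TYPE_LIVE_AUDIO, callback);
        return callback;
    }
}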
diff --git a/android/media/PlayerBase.java b/android/media/PlayerBase.java
index 4808d7a5..09449a18 100644
--- a/android/media/PlayerBase.java
+++ b/android/media/PlayerBase.java
@@ -127,8 +127,9 @@ public abstract class PlayerBase {
Log.e(TAG, "Error talking to audio service, STARTED state will not be tracked", e);
}
synchronized (mLock) {
+ boolean attributesChanged = (mAttributes != attr);
mAttributes = attr;
- updateAppOpsPlayAudio_sync();
+ updateAppOpsPlayAudio_sync(attributesChanged);
}
}
@@ -200,16 +201,13 @@ public abstract class PlayerBase {
}
void baseSetVolume(float leftVolume, float rightVolume) {
- final boolean hasAppOpsPlayAudio;
+ final boolean isRestricted;
synchronized (mLock) {
mLeftVolume = leftVolume;
mRightVolume = rightVolume;
- hasAppOpsPlayAudio = mHasAppOpsPlayAudio;
- if (isRestricted_sync()) {
- return;
- }
+ isRestricted = isRestricted_sync();
}
- playerSetVolume(!hasAppOpsPlayAudio/*muting*/,
+ playerSetVolume(isRestricted/*muting*/,
leftVolume * mPanMultiplierL, rightVolume * mPanMultiplierR);
}
@@ -250,7 +248,7 @@ public abstract class PlayerBase {
private void updateAppOpsPlayAudio() {
synchronized (mLock) {
- updateAppOpsPlayAudio_sync();
+ updateAppOpsPlayAudio_sync(false);
}
}
@@ -258,7 +256,7 @@ public abstract class PlayerBase {
* To be called whenever a condition that might affect audibility of this player is updated.
* Must be called synchronized on mLock.
*/
- void updateAppOpsPlayAudio_sync() {
+ void updateAppOpsPlayAudio_sync(boolean attributesChanged) {
boolean oldHasAppOpsPlayAudio = mHasAppOpsPlayAudio;
try {
int mode = AppOpsManager.MODE_IGNORED;
@@ -275,9 +273,10 @@ public abstract class PlayerBase {
// AppsOps alters a player's volume; when the restriction changes, reflect it on the actual
// volume used by the player
try {
- if (oldHasAppOpsPlayAudio != mHasAppOpsPlayAudio) {
+ if (oldHasAppOpsPlayAudio != mHasAppOpsPlayAudio ||
+ attributesChanged) {
getService().playerHasOpPlayAudio(mPlayerIId, mHasAppOpsPlayAudio);
- if (mHasAppOpsPlayAudio) {
+ if (!isRestricted_sync()) {
if (DEBUG_APP_OPS) {
Log.v(TAG, "updateAppOpsPlayAudio: unmuting player, vol=" + mLeftVolume
+ "/" + mRightVolume);